/// <summary>
/// Loads the training documents found under <paramref name="filesLocation"/> into the
/// DomPool, learns a decision tree separating target from non-target nodes, and returns
/// the "forgiving" XPath expression generated from that tree.
/// </summary>
/// <param name="filesLocation">Directory containing the annotated training HTML files.</param>
/// <returns>A forgiving XPath selecting the learned target nodes.</returns>
public static string LearnXpathFromTrainingFiles(string filesLocation)
{
    // Populate the shared DomPool state and compute node features.
    DomPool.LoadDocuments(filesLocation);
    DomPool.Initiate();
    DomPool.ExtractAllFeatures();

    // Seed the tree root: all nodes, with positives/negatives taken from the pool.
    var root = new DecisionNode
    {
        InitialNodeSet   = new HashSet<HtmlNode>(DomPool.TargetNodes.Union(DomPool.NonTargetNodes)),
        SelectedNegative = new HashSet<HtmlNode>(DomPool.NonTargetNodes.Except(DomPool.TargetNodesPrecision)),
        SelectedPositive = new HashSet<HtmlNode>(DomPool.TargetNodes),
        FeatureSet       = new HashSet<Feature>()
    };
    root.CalculateEntropy();

    // Grow the tree, then serialize it as a forgiving XPath.
    DecisionTreeLearning.RecursiveTreeImprovement(root);
    return XpathTools.GenerateAForgivingXpath(root);
}
/// <summary>
/// Benchmark harness. For every category folder and attribute sub-folder under
/// <paramref name="filesLocation"/>, sweeps training-set sizes i = 1..(#docs-1),
/// runs the configured tools on every training subset of that size, and prints
/// per-site and averaged precision/recall/F-score plus total query time.
/// Test documents are read from a parallel "testset" directory tree.
/// Blocks on Console.ReadLine() when finished.
/// </summary>
public static void RunTest(string filesLocation)
{
    string[] folders = Directory.GetDirectories(filesLocation);
    foreach (string fldr in folders)
    {
        Console.WriteLine("Running for category:" + fldr);
        string[] innerfolders = Directory.GetDirectories(fldr);
        foreach (string innerdir in innerfolders)
        {
            Console.Out.Flush();
            Console.WriteLine("Running for att:" + innerdir);
            // Test docs live under "testset/<same relative path>".
            DomPool.LoadTestDocuments(innerdir.Replace(filesLocation, "testset"));
            DomPool.LoadDocuments(innerdir);
            // i = training-set size, swept over all sizes smaller than the pool.
            for (int i = 1; i <= (DomPool.allDocsNames.Count() - 1); i++)
            {
                // NOTE(review): index 3 is labelled "svm" but the tool==3 branch below runs
                // the NB model, and "svm" appears again at index 5 (the branch that actually
                // runs SVM) — confirm the intended labels.
                string[] tools = new string[] { "our", "our - not forgiving", "j48", "svm", "xpath-align", "svm" };
                // toolStart = 5 restricts this run to the last tool (SVM) only.
                int toolStart = 5;
                // Cache of the strict ("non forgiving") xpath per training subset; filled by
                // tool 0 and read by tool 1 (so tool 1 requires tool 0 to have run first).
                Dictionary<string, string> xpathNonForgiving = new Dictionary<string, string>();
                for (int tool = toolStart; tool < 6; tool++)
                {
                    Console.WriteLine("[-] running for training set size=" + i);
                    // All index subsets of size i out of the document pool.
                    IEnumerable<IEnumerable<int>> subsetsIndexes = Subsets(DomPool.allDocsNames.Count(), i);
                    //Reduce size ...for testing only
                    //subsetsIndexes = subsetsIndexes.Take(30);
                    double totalAccuracy = 0;
                    double totalRecall = 0;
                    long totalTime = 0;  // summed query/inference wall-clock ms across subsets
                    Console.WriteLine("[-] tool:" + tools[tool]);
                    Console.WriteLine("+ will run " + subsetsIndexes.Count() + " different iterations for the current set size");
                    int s = 0;  // subset counter, used only for the progress dots
                    // Per-site accumulators over all subset iterations.
                    Dictionary<String, double> SiteTotalRecall = new Dictionary<string, double>();
                    Dictionary<String, double> SiteTotalPrecision = new Dictionary<string, double>();
                    Dictionary<String, double> SiteTotalTests = new Dictionary<string, double>();
                    foreach (string site in DomPool.allDocsNames)
                    {
                        SiteTotalPrecision[site] = 0;
                        SiteTotalRecall[site] = 0;
                        SiteTotalTests[site] = 0;
                    }
                    foreach (IEnumerable<int> currSubsetIndexes in subsetsIndexes)
                    {
                        List<int> listRep = new List<int>(currSubsetIndexes);
                        // Canonical string key for this subset (",1,4,7" style), used for the xpath cache.
                        string stringRep = listRep.Aggregate("", (b, x) => b + "," + x);
                        s++;
                        if (s % 10 == 0)
                        {
                            //Console.Write("(" + s + "/" + subsetsIndexes.Count() + ") ");
                            Console.Write(".");
                        }
                        //if (tool == toolStart)
                        //{
                        // Re-initialize the pool on this training subset and recompute features.
                        HashSet<String> currSubset = GetSubSet(DomPool.allDocsNames, currSubsetIndexes);
                        DomPool.Initiate(currSubset);
                        DomPool.ExtractAllFeatures();
                        //}
                        var runres = new HashSet<HtmlNode>();
                        //our method
                        if (tool < 2)
                        {
                            string xpath = "";
                            if (tool == 0)
                            {
                                // Learn a decision tree, derive the forgiving xpath for this run
                                // and cache the strict xpath for tool 1.
                                DecisionNode dn = new DecisionNode();
                                dn.InitialNodeSet = new HashSet<HtmlNode>(DomPool.TargetNodes.Union(DomPool.NonTargetNodes));
                                dn.SelectedNegative = new HashSet<HtmlNode>(DomPool.NonTargetNodes.Except(DomPool.TargetNodesPrecision));
                                dn.SelectedPositive = new HashSet<HtmlNode>(DomPool.TargetNodes);
                                dn.FeatureSet = new HashSet<Feature>();
                                dn.CalculateEntropy();
                                DecisionTreeLearning.RecursiveTreeImprovement(dn);
                                xpath = XpathTools.GenerateAForgivingXpath(dn);
                                xpathNonForgiving[stringRep] = XpathTools.DecisionTreeToXpath(dn, new HashSet<Feature>(), 1);
                                xpathNonForgiving[stringRep] = "//*" + (xpathNonForgiving[stringRep].Equals("") ? "" : ("[" + xpathNonForgiving[stringRep] + "]"));
                            }
                            if (tool == 1)
                            {
                                // Relies on tool 0 having populated the cache for this subset key.
                                xpath = xpathNonForgiving[stringRep];
                            }
                            Console.WriteLine("Query:" + xpath);
                            // Only the query/inference itself is timed, not the training.
                            var watch = Stopwatch.StartNew();
                            runres = DomPool.TESTRunXpathQuery(xpath);
                            watch.Stop();
                            var elapsedMs = watch.ElapsedMilliseconds;
                            totalTime = totalTime + elapsedMs;
                        }
                        else
                        {
                            if (tool == 2)
                            {
                                // "j48" decision-tree baseline.
                                ModelLearner model = new ModelLearner();
                                model.LearnModel();
                                var watch = Stopwatch.StartNew();
                                runres = model.RunOnTestSet();
                                watch.Stop();
                                var elapsedMs = watch.ElapsedMilliseconds;
                                totalTime = totalTime + elapsedMs;
                            }
                            else
                            {
                                if (tool == 3)
                                {
                                    // Naive Bayes baseline (labelled "svm" above — see NOTE(review)).
                                    NB model = new NB();
                                    model.LearnModel();
                                    var watch = Stopwatch.StartNew();
                                    runres = model.RunOnTestSet();
                                    watch.Stop();
                                    var elapsedMs = watch.ElapsedMilliseconds;
                                    totalTime = totalTime + elapsedMs;
                                }
                                else
                                {
                                    if (tool == 4)
                                    {
                                        // XPath-alignment baseline.
                                        XpathAlignment model = new XpathAlignment();
                                        model.LearnModel();
                                        var watch = Stopwatch.StartNew();
                                        runres = model.RunOnTestSet();
                                        watch.Stop();
                                        var elapsedMs = watch.ElapsedMilliseconds;
                                        totalTime = totalTime + elapsedMs;
                                    }
                                    else
                                    {
                                        // tool == 5: SVM baseline.
                                        SVM model = new SVM();
                                        model.LearnModel();
                                        var watch = Stopwatch.StartNew();
                                        runres = model.RunOnTestSet();
                                        watch.Stop();
                                        var elapsedMs = watch.ElapsedMilliseconds;
                                        totalTime = totalTime + elapsedMs;
                                    }
                                }
                            }
                        }
                        // True positives for recall (spos) and for precision (sposprecision).
                        HashSet<HtmlNode> spos = new HashSet<HtmlNode>(DomPool.TESTTargetNodes.Intersect(runres));
                        HashSet<HtmlNode> sposprecision = new HashSet<HtmlNode>(DomPool.TESTTargetNodesPrecision.Intersect(runres));
                        // Per-document (per-site) breakdown; training docs are excluded.
                        foreach (var entry in DomPool.docsAndNames)
                        {
                            if (DomPool.trainingDocsNames.Contains(entry.Key))
                            {
                                continue;
                            }
                            HashSet<HtmlNode> docNodes = new HashSet<HtmlNode>(entry.Value.SelectNodes("//*"));
                            HashSet<HtmlNode> currspos = new HashSet<HtmlNode>(spos.Intersect(docNodes));
                            HashSet<HtmlNode> currrunres = new HashSet<HtmlNode>(runres.Intersect(docNodes));
                            HashSet<HtmlNode> currsposprecision = new HashSet<HtmlNode>(sposprecision.Intersect(docNodes));
                            HashSet<HtmlNode> currTargetNodes = new HashSet<HtmlNode>(DomPool.TESTTargetNodes.Intersect(docNodes));
                            // May be NaN when the denominator is 0; guarded below before accumulating.
                            double currSiteAccuracy = (currsposprecision.Count() / ((double)currrunres.Count()));
                            double currSiteRecall = (currspos.Count() / ((double)currTargetNodes.Count()));
                            if (((double)currrunres.Count()) > 0)
                            {
                                SiteTotalPrecision[entry.Key] = SiteTotalPrecision[entry.Key] + currSiteAccuracy;
                                SiteTotalRecall[entry.Key] = SiteTotalRecall[entry.Key] + currSiteRecall;
                            }
                            SiteTotalTests[entry.Key] = SiteTotalTests[entry.Key] + 1;
                        }
                        // Overall (cross-site) figures for this subset; same NaN guard pattern.
                        double currAccuracy = (sposprecision.Count() / ((double)runres.Count()));
                        double currRecall = (spos.Count() / ((double)DomPool.TESTTargetNodes.Count()));
                        if (runres.Count() > 0)
                        {
                            totalAccuracy = totalAccuracy + currAccuracy;
                            totalRecall = totalRecall + currRecall;
                        }
                    }
                    totalAccuracy = totalAccuracy / subsetsIndexes.Count();
                    totalRecall = totalRecall / subsetsIndexes.Count();
                    Console.WriteLine("########## Results " + tools[tool] + " for i=" + i + "##########");
                    Console.WriteLine("+++++++++ Detailed Results for i=" + i + "++++++++++#");
                    double count = 0;
                    double totalSumPrecision = 0;
                    double totalSumRecall = 0;
                    double avgRecall = 0;
                    double avgPrecision = 0;
                    double avgFscore = 0;
                    // Number of sites actually tested; denominator for the precision average.
                    double numPrecision = 0;
                    foreach (string site in DomPool.allDocsNames)
                    {
                        // Never-tested sites get a test count of 1 (avoids 0/0 below) but are
                        // excluded from the precision denominator.
                        if (SiteTotalTests[site] < 1)
                        {
                            SiteTotalTests[site]++;
                        }
                        else
                        {
                            numPrecision++;
                        }
                        double sitePrecision = SiteTotalPrecision[site] / SiteTotalTests[site];
                        double siteRecall = SiteTotalRecall[site] / SiteTotalTests[site];
                        double siteFscore = 2 * (sitePrecision * siteRecall) / (sitePrecision + siteRecall);
                        if (siteRecall == 0 && sitePrecision == 0)
                        {
                            // 0/0 would yield NaN; define the F-score as 0 in that case.
                            siteFscore = 0;
                        }
                        count++;
                        avgRecall = avgRecall + siteRecall;
                        avgPrecision = avgPrecision + sitePrecision;
                        avgFscore = avgFscore + siteFscore;
                        Console.WriteLine(">" + site + ": Precision:" + sitePrecision + " , Recall:" + siteRecall + ", F-score:" + siteFscore);
                    }
                    Console.WriteLine("++++++++++++++++Total+++++++++++++++++");
                    avgRecall = avgRecall / count;
                    avgPrecision = avgPrecision / numPrecision;
                    avgFscore = avgFscore / count;
                    Console.WriteLine("Recall:" + avgRecall);
                    Console.WriteLine("Precision:" + avgPrecision);
                    Console.WriteLine("F-score:" + avgFscore);
                    Console.WriteLine("Time:" + totalTime);
                }
            }
        }
    }
    Console.ReadLine();
}
/// <summary>
/// Entry point. Reads a single command from the console and dispatches:
/// "huge" = site batch test, "t" = interactive XPath query mode, "r" = learn and print
/// an XPath from the training files, "s"/"archive"/default = run a test suite with
/// console output redirected to a results file, "a" = parse a single file.
/// </summary>
/// <remarks>
/// Fixes over the previous version: the FileStream/StreamWriter pairs are now disposed
/// via <c>using</c> even when a test run throws; Console.Out is restored in a
/// <c>finally</c>; a null return from ReadLine (EOF) no longer crashes; and the
/// five-deep nested else-if chain is a flat switch with the redirect boilerplate
/// factored into one helper.
/// </remarks>
static void Main(string[] args)
{
    Console.WriteLine("T for test, R for Run, S for seen overall testing and O for overall testing:");
    // Normalize once; ReadLine() returns null at EOF, which previously threw NRE.
    string choice = (ReadLine() ?? "").ToLower().Trim();
    switch (choice)
    {
        case "huge":
            TestSites.TestAllSites();
            return;
        case "t":
            RunInteractiveQueryMode();
            break;
        case "r":
            Console.WriteLine(LearnXpathWrapper.LearnXpathFromTrainingFiles(FILES_LOCATION));
            Console.ReadLine();
            break;
        case "s":
            Console.WriteLine("Output is redirected to resultsSeen.txt in the debug dir");
            RunWithConsoleRedirect("resultsSeen.txt", () => OverallSeenTesting.RunTest(FILES_LOCATION));
            break;
        case "archive":
            Console.WriteLine("Output is redirected to results.txt in the debug dir");
            RunWithConsoleRedirect("archive2-results.txt", () => OverallArchive2Testing.RunTest(ARCHIVE_FILES_LOCATION));
            break;
        case "a":
            Console.WriteLine("Please enter file name to parse:");
            string fnp = (ReadLine() ?? "").Trim();
            parseres.learn(fnp);
            parseres.save("parsed" + fnp);
            break;
        default:
            // Any other input (the documented "O" included) runs the overall test.
            Console.WriteLine("Output is redirected to results.txt in the debug dir");
            RunWithConsoleRedirect("results.txt", () => OverallTesting.RunTest(FILES_LOCATION));
            break;
    }
}

/// <summary>
/// Redirects Console.Out to <paramref name="outputFile"/> (created/truncated), runs
/// <paramref name="run"/>, and always restores the console and disposes the writer —
/// even if the action throws.
/// </summary>
private static void RunWithConsoleRedirect(string outputFile, Action run)
{
    TextWriter original = Console.Out;
    using (var sw = new StreamWriter(new FileStream(outputFile, FileMode.Create)))
    {
        Console.SetOut(sw);
        try
        {
            run();
        }
        finally
        {
            Console.SetOut(original);
        }
    } // using disposes sw, which flushes and closes the underlying FileStream.
}

/// <summary>
/// Interactive mode: loads the training documents, then repeatedly reads an XPath query
/// and prints per-document and overall accuracy/recall until the user types "exit".
/// </summary>
private static void RunInteractiveQueryMode()
{
    DomPool.LoadDocuments(FILES_LOCATION);
    DomPool.Initiate();
    Console.WriteLine("insert query:");
    string q = ReadLine();
    // Null (EOF) ends the loop instead of crashing on q.Equals.
    while (q != null && !q.Equals("exit"))
    {
        var runres = DomPool.RunXpathQuery(q);
        if (runres != null)
        {
            Console.WriteLine("result size" + runres.Count());
            // True positives for recall (spos) and for precision (sposprecision).
            HashSet<HtmlNode> spos = new HashSet<HtmlNode>(DomPool.TargetNodes.Intersect(runres));
            HashSet<HtmlNode> sposprecision = new HashSet<HtmlNode>(DomPool.TargetNodesPrecision.Intersect(runres));
            foreach (var entry in DomPool.docsAndNames)
            {
                // Restrict the global result sets to this document's nodes.
                HashSet<HtmlNode> docNodes = new HashSet<HtmlNode>(entry.Value.SelectNodes("//*"));
                HashSet<HtmlNode> currspos = new HashSet<HtmlNode>(spos.Intersect(docNodes));
                HashSet<HtmlNode> currrunres = new HashSet<HtmlNode>(runres.Intersect(docNodes));
                HashSet<HtmlNode> currsposprecision = new HashSet<HtmlNode>(sposprecision.Intersect(docNodes));
                HashSet<HtmlNode> currTargetNodes = new HashSet<HtmlNode>(DomPool.TargetNodes.Intersect(docNodes));
                Console.WriteLine(entry.Key + "-Accuracy:" + (currsposprecision.Count() / ((double)currrunres.Count())) + ". Recall:" + (currspos.Count() / ((double)currTargetNodes.Count())) + "");
            }
            Console.WriteLine("Accuracy:" + (sposprecision.Count() / ((double)runres.Count())) + ". Recall:" + (spos.Count() / ((double)DomPool.TargetNodes.Count())) + "");
        }
        else
        {
            Console.WriteLine("null");
        }
        Console.WriteLine("insert query:");
        q = ReadLine();
    }
}
/// <summary>
/// For each site: crawls its start URLs (following the next-page link) to collect up to
/// pagesNum links per attribute, saves annotated training pages under "huge/", learns a
/// decision-tree XPath and an alignment XPath from 5 training pages, and evaluates both
/// on the remaining links via CheckOnTest. Blocks on Console.ReadLine() when finished.
/// </summary>
/// <remarks>
/// Fixes over the previous version: a missing next-page link no longer dereferences a
/// null <c>nextLink</c> (the NRE was being swallowed by the outer catch, silently ending
/// the crawl); the unused, undisposed <c>MD5.Create()</c> and the unused
/// <c>sitePages</c>/<c>testDic</c> locals are removed; and the per-site cleanup now runs
/// in the <c>finally</c> block so it is not skipped on exception.
/// NOTE(review): the empty try/finally suggests a try/catch (continue with next site)
/// may have been intended — confirm whether exceptions should propagate out of the loop.
/// </remarks>
public static void PorcessSites(List<SiteInfo> siteinfos)
{
    foreach (SiteInfo si in siteinfos)
    {
        try
        {
            // Global page budget across all attributes of this site.
            int leftPages = pagesNum * si.attributeExtraction.Keys.Count();
            if (!SiteDocuments.ContainsKey(si.SiteName))
            {
                SiteDocuments[si.SiteName] = new Dictionary<string, Dictionary<String, HtmlNode>>();
                SiteLinks[si.SiteName] = new Dictionary<string, HashSet<String>>();
                foreach (String attrName in si.attributeExtraction.Keys)
                {
                    SiteDocuments[si.SiteName][attrName] = new Dictionary<string, HtmlNode>();
                    SiteLinks[si.SiteName][attrName] = new HashSet<string>();
                }
            }
            // Crawl: collect links (per attribute) starting from each start URL,
            // following the site's next-page link until the budget runs out.
            foreach (String startURL in si.StartURLs)
            {
                try
                {
                    String currURL = startURL;
                    while (currURL != null)
                    {
                        HtmlDocument doc = new HtmlDocument();
                        var currHTML = URLDownloader.GetHtmlOfURL(currURL);
                        doc.LoadHtml(currHTML);
                        if (!String.IsNullOrEmpty(si.PageExtractionXpath))
                        {
                            // Listing page: harvest detail-page links via the extraction xpath.
                            var links = doc.DocumentNode.SelectNodes(si.PageExtractionXpath);
                            foreach (HtmlNode lnk in links)
                            {
                                try
                                {
                                    String pageLink = lnk.Attributes["href"].Value;
                                    foreach (string attr in si.attributeExtraction.Keys)
                                    {
                                        var newURL = URLDownloader.UrlFixIfRelative(pageLink, currURL);
                                        if (SiteLinks[si.SiteName][attr].Contains(newURL)) { continue; }
                                        SiteLinks[si.SiteName][attr].Add(newURL);
                                        if (--leftPages <= 0) { break; }
                                    }
                                    if (leftPages <= 0) { break; }
                                }
                                catch
                                {
                                    // Best-effort: skip links with no usable href attribute.
                                }
                            }
                        }
                        else
                        {
                            // No listing xpath: the current page itself is the document.
                            foreach (string attr in si.attributeExtraction.Keys)
                            {
                                try
                                {
                                    if (SiteLinks[si.SiteName][attr].Contains(currURL)) { continue; }
                                    SiteLinks[si.SiteName][attr].Add(currURL);
                                    if (--leftPages <= 0) { break; }
                                }
                                catch { break; }
                            }
                        }
                        if (leftPages <= 0) { break; }
                        // Resolve the next-page link, if any.
                        String nextLink = null;
                        try
                        {
                            nextLink = HttpUtility.HtmlDecode(doc.DocumentNode.SelectSingleNode(si.NextPageXPath).Attributes["href"].Value);
                        }
                        catch { nextLink = null; }
                        if (nextLink == null)
                        {
                            // FIX: previously fell through to nextLink.ToLower() and threw NRE.
                            break;
                        }
                        nextLink = URLDownloader.UrlFixIfRelative(nextLink, currURL);
                        // Stop when the "next" link points back at the current page.
                        if (!currURL.ToLower().Trim().Equals(nextLink.ToLower().Trim()))
                        {
                            currURL = nextLink;
                        }
                        else
                        {
                            break;
                        }
                    }
                }
                catch
                {
                    // Best-effort: a failing start URL must not abort the other start URLs.
                }
            }
            // Train + evaluate per attribute.
            foreach (String attr in si.attributeExtraction.Keys)
            {
                // First 5 collected links form the training set.
                var trainingkeys = new HashSet<String>(SiteLinks[si.SiteName][attr].Take(5));
                var trainingDic = new Dictionary<String, HtmlNode>();
                foreach (String lnk in trainingkeys)
                {
                    trainingDic.Add(lnk, GetHtmlNode(lnk));
                }
                foreach (var lnk in trainingDic.Keys)
                {
                    HtmlNode adoc = trainingDic[lnk];
                    try
                    {
                        // Annotate ground-truth nodes with userselected="yes" so the
                        // saved training page carries the labels.
                        var gt = adoc.SelectNodes(si.attributeExtraction[attr]);
                        if (gt != null)
                        {
                            foreach (var targetNode in gt)
                            {
                                if (targetNode.Attributes.Contains("userselected"))
                                {
                                    targetNode.SetAttributeValue("userselected", "yes");
                                }
                                else
                                {
                                    targetNode.Attributes.Add("userselected", "yes");
                                }
                            }
                        }
                    }
                    catch
                    {
                        // Best-effort: a bad ground-truth xpath leaves the page unannotated.
                    }
                    // Persist the annotated training page once, keyed by MD5 of its URL.
                    if (!File.Exists("huge/" + si.SiteName + "/training/" + attr + "/" + getMD5(lnk) + ".html"))
                    {
                        Directory.CreateDirectory("huge/" + si.SiteName + "/training/" + attr);
                        File.WriteAllText("huge/" + si.SiteName + "/training/" + attr + "/" + getMD5(lnk) + ".html", adoc.InnerHtml);
                    }
                }
                DomPool.LoadDocuments(trainingDic);
                DomPool.Initiate(new HashSet<string>(trainingDic.Keys));
                DomPool.ExtractAllFeatures();
                // Learn the decision tree and derive the forgiving / strict xpaths.
                DecisionNode dn = new DecisionNode();
                dn.InitialNodeSet = new HashSet<HtmlNode>(DomPool.TargetNodes.Union(DomPool.NonTargetNodes));
                dn.SelectedNegative = new HashSet<HtmlNode>(DomPool.NonTargetNodes.Except(DomPool.TargetNodesPrecision));
                dn.SelectedPositive = new HashSet<HtmlNode>(DomPool.TargetNodes);
                dn.FeatureSet = new HashSet<Feature>();
                dn.CalculateEntropy();
                DecisionTreeLearning.RecursiveTreeImprovement(dn);
                var xpath = XpathTools.GenerateAForgivingXpath(dn);
                var xpathNonForgiving = XpathTools.DecisionTreeToXpath(dn, new HashSet<Feature>(), 1);
                xpathNonForgiving = "//*" + (xpathNonForgiving.Equals("") ? "" : ("[" + xpathNonForgiving + "]"));
                // Alignment baseline trained on the same pool.
                XpathAlignment model = new XpathAlignment();
                model.LearnModel();
                var alignmentXpath = model.xpath;
                // Evaluate both xpaths on the held-out links.
                CheckOnTest(new HashSet<string>(SiteLinks[si.SiteName][attr].Except(trainingkeys)), xpath, si.attributeExtraction[attr], si.SiteName, attr, "ForgivingXP");
                CheckOnTest(new HashSet<string>(SiteLinks[si.SiteName][attr].Except(trainingkeys)), alignmentXpath, si.attributeExtraction[attr], si.SiteName, attr, "Alignment");
            }
        }
        finally
        {
            // Always release this site's cached documents, even on failure.
            SiteDocuments.Remove(si.SiteName);
        }
    }
    Console.ReadLine();
}