public void PlayGolfExample()
{
    // http://www.saedsayad.com/decision_tree.htm
    var outlook  = new int[] { 0, 0, 1, 2, 2, 2, 1, 0, 0, 2, 0, 1, 1, 2 };
    var temp     = new int[] { 0, 0, 0, 1, 2, 2, 2, 1, 2, 1, 1, 1, 0, 1 };
    var humidity = new int[] { 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0 };
    var wind     = new int[] { 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1 };
    var output   = new int[] { 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0 };

    DecisionTreeLearning<int> dtl = DecisionTreeLearning<int>.Create<int>(
        x => output[x],
        new DecisionTreeOptions() { MaxDepth = 3 });

    var outlookStrs = new string[] { "Rainy", "Overcast", "Sunny" };
    dtl.AddDiscreteFeature<string>("outlook", a => outlookStrs[outlook[a]]);

    var tempStrs = new string[] { "Hot", "Mild", "Cool" };
    dtl.AddDiscreteFeature<string>("temp", a => tempStrs[temp[a]]);

    dtl.AddDiscreteFeature<int>("humidity", a => humidity[a]);
    dtl.AddDiscreteFeature<int>("wind", a => wind[a]);

    var dt = dtl.Learn(Enumerable.Range(0, output.Length));

    Assert.AreEqual(dt.Feature.Name, "outlook");
    Assert.AreEqual(dt.ChildNodes.First(x => (x.Key as DiscreteFeatureValue).Value.ToString() == "Rainy").Value.Feature.Name, "humidity");
    Assert.AreEqual(dt.ChildNodes.First(x => (x.Key as DiscreteFeatureValue).Value.ToString() == "Sunny").Value.Feature.Name, "wind");
    Assert.IsNull(dt.ChildNodes.First(x => (x.Key as DiscreteFeatureValue).Value.ToString() == "Overcast").Value.Feature, "Overcast node should be leaf");
    Assert.AreEqual(dt.GetOutput(0), 0);
    Assert.AreEqual(dt.GetOutput(2), 1);
}
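The same Create / AddDiscreteFeature / Learn pattern works on any index-based dataset. The sketch below is illustrative only; it assumes the DecisionTreeLearning&lt;T&gt; API exactly as used in the example above and is not an additional test from the library.

// Minimal sketch (assumes the DecisionTreeLearning<int> API shown above):
// label each row index, register one discrete feature, then learn a tree.
public void TinyDecisionTreeSketch()
{
    var feature = new int[] { 0, 0, 1, 1 };
    var label   = new int[] { 0, 0, 1, 1 };

    var learner = DecisionTreeLearning<int>.Create<int>(
        i => label[i],
        new DecisionTreeOptions() { MaxDepth = 2 });

    learner.AddDiscreteFeature<int>("feature", i => feature[i]);

    var tree = learner.Learn(Enumerable.Range(0, label.Length));

    // With a single feature perfectly correlated with the label,
    // the root should split on it and training rows should be reproduced.
    Assert.AreEqual(tree.Feature.Name, "feature");
    Assert.AreEqual(tree.GetOutput(0), 0);
    Assert.AreEqual(tree.GetOutput(2), 1);
}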
public ISolver simpleSeparation(int percentTrain)
{
    int sizeTrainDataset = Convert.ToInt32(InputData.Length * ((double)percentTrain / 100));
    int sizeTestDataset  = InputData.Length - sizeTrainDataset;

    float[][] trainInputDataset = new float[sizeTrainDataset][];
    float[][] testInputDataset  = new float[InputData.Length - sizeTrainDataset][];
    float[] trainOutputDataset  = new float[sizeTrainDataset];
    float[] testOutputDataset   = new float[InputData.Length - sizeTrainDataset];

    Array.Copy(InputData, trainInputDataset, sizeTrainDataset);
    Array.Copy(InputData, sizeTrainDataset, testInputDataset, 0, sizeTestDataset);
    Array.Copy(OutputData, trainOutputDataset, sizeTrainDataset);
    Array.Copy(OutputData, sizeTrainDataset, testOutputDataset, 0, sizeTestDataset);

    if (ISolver is INeuralNetwork)
    {
        LearningAlgoManager la = new LearningAlgoManager()
        {
            usedAlgo = LS.LearningAlgorithmName,
            GeneticParams = LS.LAParameters
        };
        ClosingError = la.startLearn(ISolver, trainInputDataset, trainOutputDataset);
    }
    else if (ISolver is DecisionTree)
    {
        DecisionTreeLearning la = new DecisionTreeLearning();
        ClosingError = la.startLearn(ISolver, trainInputDataset, trainOutputDataset);
    }

    PreprocessingManager preprocessing = new PreprocessingManager();

    // Training-set error rate.
    mistakeTrain = 0;
    List<string> expectedOutputValues = trainOutputDataset.Select(x => Convert.ToString(x)).ToList();
    List<string> obtainedOutputValues = new List<string>();
    for (int i = 0; i < sizeTrainDataset; i++)
    {
        obtainedOutputValues.Add(Convert.ToString(ISolver.Solve(trainInputDataset[i])[0]));
    }
    List<bool> comparisonOfResult = preprocessing.compareExAndObValues(expectedOutputValues, obtainedOutputValues, SelectionID, ParameterID);
    var counts = comparisonOfResult.GroupBy(x => x).ToDictionary(x => x.Key, x => x.Count());
    // Avoid a missing-key lookup when every prediction matches.
    mistakeTrain = counts.ContainsKey(false) ? (float)counts[false] / (float)sizeTrainDataset : 0f;

    // Test-set error rate.
    mistakeTest = 0;
    expectedOutputValues = testOutputDataset.Select(x => Convert.ToString(x)).ToList();
    obtainedOutputValues.Clear();
    for (int i = 0; i < sizeTestDataset; i++)
    {
        obtainedOutputValues.Add(Convert.ToString(ISolver.Solve(testInputDataset[i])[0]));
    }
    comparisonOfResult = preprocessing.compareExAndObValues(expectedOutputValues, obtainedOutputValues, SelectionID, ParameterID);
    counts = comparisonOfResult.GroupBy(x => x).ToDictionary(x => x.Key, x => x.Count());
    mistakeTest = counts.ContainsKey(false) ? (float)counts[false] / (float)sizeTestDataset : 0f;

    return ISolver;
}
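The split-and-score logic in simpleSeparation can be separated from the solver-specific parts. The helpers below are a hedged sketch using only standard .NET (Array.Copy, LINQ); the names SplitByPercent and ErrorRate are illustrative and not part of the library above. Requires System, System.Collections.Generic, and System.Linq.

// Sketch: split a dataset by percentage and compute a simple error rate,
// mirroring the bookkeeping done inside simpleSeparation.
public static (float[][] train, float[][] test) SplitByPercent(float[][] input, int percentTrain)
{
    int trainSize = Convert.ToInt32(input.Length * ((double)percentTrain / 100));
    var train = new float[trainSize][];
    var test  = new float[input.Length - trainSize][];
    Array.Copy(input, train, trainSize);
    Array.Copy(input, trainSize, test, 0, test.Length);
    return (train, test);
}

public static float ErrorRate(IEnumerable<string> expected, IEnumerable<string> obtained)
{
    // Fraction of positions where the obtained value differs from the expected one.
    var matches = expected.Zip(obtained, (e, o) => e == o).ToList();
    return matches.Count == 0 ? 0f : (float)matches.Count(ok => !ok) / matches.Count;
}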
public void ContiniousFeatureTest()
{
    var data = Enumerable.Range(0, 1000).Select(x => new PatientRecord()).ToArray();
    var dt = DecisionTreeLearning<PatientRecord>.Create<int>(x => x.Cancer ? 1 : 0, new DecisionTreeOptions());

    dt.AddContiniousFeature("Age", x => x.Age);
    dt.AddContiniousFeature("Height", x => x.Height);
    dt.AddContiniousFeature("Weight", x => x.Weight);
    dt.AddDiscreteFeature("Smoke", x => x.Smoke ? 1 : 0);

    var node = dt.Learn(data);
    Assert.AreEqual(node.Feature.Name, "Smoke");
}
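PatientRecord is used above but not defined in this listing. The class below is a hypothetical sketch consistent with the properties the lambdas access; the random generation is an assumption, not the library's actual test fixture, and the test's assertion only holds if the generated data makes Smoke the most informative split.

// Hypothetical PatientRecord sketch: property names match the lambdas above;
// the data generation is an assumption made for illustration.
public class PatientRecord
{
    private static readonly Random Rng = new Random(42);

    public double Age    { get; }
    public double Height { get; }
    public double Weight { get; }
    public bool   Smoke  { get; }
    public bool   Cancer { get; }

    public PatientRecord()
    {
        Age    = Rng.Next(20, 90);
        Height = Rng.Next(150, 200);
        Weight = Rng.Next(50, 120);
        Smoke  = Rng.NextDouble() < 0.5;
        // Cancer is tied to Smoke so that the "Smoke" feature carries the most information gain.
        Cancer = Smoke && Rng.NextDouble() < 0.9;
    }
}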
public static string LearnXpathFromTrainingFiles(string filesLocation)
{
    DomPool.LoadDocuments(filesLocation);
    DomPool.Initiate();
    DomPool.ExtractAllFeatures();

    DecisionNode dn = new DecisionNode();
    dn.InitialNodeSet = new HashSet<HtmlNode>(DomPool.TargetNodes.Union(DomPool.NonTargetNodes));
    dn.SelectedNegative = new HashSet<HtmlNode>(DomPool.NonTargetNodes.Except(DomPool.TargetNodesPrecision));
    dn.SelectedPositive = new HashSet<HtmlNode>(DomPool.TargetNodes);
    dn.FeatureSet = new HashSet<Feature>();
    dn.CalculateEntropy();

    DecisionTreeLearning.RecursiveTreeImprovement(dn);

    return XpathTools.GenerateAForgivingXpath(dn);
    // "//*[" + XpathTools.DecisionTreeToXpath(dn, new HashSet<Feature>()) + "]";
}
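A typical caller would point LearnXpathFromTrainingFiles at a folder of annotated training pages and then run the returned XPath on unseen documents. The snippet below is a usage sketch: the paths are placeholders, and only HtmlDocument.Load and SelectNodes are real HtmlAgilityPack calls.

// Usage sketch: learn an XPath from a training folder, then apply it to a new page.
// "trainingset/books/title" and "newpage.html" are placeholder paths.
string learnedXpath = LearnXpathFromTrainingFiles("trainingset/books/title");

var doc = new HtmlAgilityPack.HtmlDocument();
doc.Load("newpage.html");

var extracted = doc.DocumentNode.SelectNodes(learnedXpath);
if (extracted != null)
{
    foreach (var node in extracted)
    {
        Console.WriteLine(node.InnerText.Trim());
    }
}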
public static void RunTest(string filesLocation)
{
    string[] folders = Directory.GetDirectories(filesLocation);
    foreach (string fldr in folders)
    {
        Console.WriteLine("Running for category:" + fldr);
        string[] innerfolders = Directory.GetDirectories(fldr);
        foreach (string innerdir in innerfolders)
        {
            Console.Out.Flush();
            Console.WriteLine("Running for att:" + innerdir);
            DomPool.LoadTestDocuments(innerdir.Replace(filesLocation, "testset"));
            DomPool.LoadDocuments(innerdir);

            //for(int i= (DomPool.allDocsNames.Count() - 1); i <= (DomPool.allDocsNames.Count()-1)/*DomPool.allDocsNames.Count()*/; i++)
            for (int i = 1; i <= (DomPool.allDocsNames.Count() - 1); i++)
            {
                string[] tools = new string[] { "our", "our - not forgiving", "j48", "svm", "xpath-align", "svm" };
                int toolStart = 5;
                Dictionary<string, string> xpathNonForgiving = new Dictionary<string, string>();

                for (int tool = toolStart; tool < 6; tool++)
                {
                    Console.WriteLine("[-] running for training set size=" + i);
                    IEnumerable<IEnumerable<int>> subsetsIndexes = Subsets(DomPool.allDocsNames.Count(), i);
                    //Reduce size ...for testing only
                    //subsetsIndexes = subsetsIndexes.Take(30);
                    double totalAccuracy = 0;
                    double totalRecall = 0;
                    long totalTime = 0;
                    Console.WriteLine("[-] tool:" + tools[tool]);
                    Console.WriteLine("+ will run " + subsetsIndexes.Count() + " different iterations for the current set size");
                    int s = 0;

                    Dictionary<String, double> SiteTotalRecall = new Dictionary<string, double>();
                    Dictionary<String, double> SiteTotalPrecision = new Dictionary<string, double>();
                    Dictionary<String, double> SiteTotalTests = new Dictionary<string, double>();
                    foreach (string site in DomPool.allDocsNames)
                    {
                        SiteTotalPrecision[site] = 0;
                        SiteTotalRecall[site] = 0;
                        SiteTotalTests[site] = 0;
                    }

                    foreach (IEnumerable<int> currSubsetIndexes in subsetsIndexes)
                    {
                        List<int> listRep = new List<int>(currSubsetIndexes);
                        string stringRep = listRep.Aggregate("", (b, x) => b + "," + x);
                        s++;
                        if (s % 10 == 0)
                        {
                            //Console.Write("(" + s + "/" + subsetsIndexes.Count() + ") ");
                            Console.Write(".");
                        }

                        //if (tool == toolStart)
                        //{
                        HashSet<String> currSubset = GetSubSet(DomPool.allDocsNames, currSubsetIndexes);
                        DomPool.Initiate(currSubset);
                        DomPool.ExtractAllFeatures();
                        //}

                        var runres = new HashSet<HtmlNode>();

                        //our method
                        if (tool < 2)
                        {
                            string xpath = "";
                            if (tool == 0)
                            {
                                DecisionNode dn = new DecisionNode();
                                dn.InitialNodeSet = new HashSet<HtmlNode>(DomPool.TargetNodes.Union(DomPool.NonTargetNodes));
                                dn.SelectedNegative = new HashSet<HtmlNode>(DomPool.NonTargetNodes.Except(DomPool.TargetNodesPrecision));
                                dn.SelectedPositive = new HashSet<HtmlNode>(DomPool.TargetNodes);
                                dn.FeatureSet = new HashSet<Feature>();
                                dn.CalculateEntropy();
                                DecisionTreeLearning.RecursiveTreeImprovement(dn);
                                xpath = XpathTools.GenerateAForgivingXpath(dn);
                                xpathNonForgiving[stringRep] = XpathTools.DecisionTreeToXpath(dn, new HashSet<Feature>(), 1);
                                xpathNonForgiving[stringRep] = "//*" + (xpathNonForgiving[stringRep].Equals("") ? "" : ("[" + xpathNonForgiving[stringRep] + "]"));
                            }
                            if (tool == 1)
                            {
                                xpath = xpathNonForgiving[stringRep];
                            }
                            Console.WriteLine("Query:" + xpath);
                            var watch = Stopwatch.StartNew();
                            runres = DomPool.TESTRunXpathQuery(xpath);
                            watch.Stop();
                            totalTime += watch.ElapsedMilliseconds;
                        }
                        else if (tool == 2)
                        {
                            ModelLearner model = new ModelLearner();
                            model.LearnModel();
                            var watch = Stopwatch.StartNew();
                            runres = model.RunOnTestSet();
                            watch.Stop();
                            totalTime += watch.ElapsedMilliseconds;
                        }
                        else if (tool == 3)
                        {
                            NB model = new NB();
                            model.LearnModel();
                            var watch = Stopwatch.StartNew();
                            runres = model.RunOnTestSet();
                            watch.Stop();
                            totalTime += watch.ElapsedMilliseconds;
                        }
                        else if (tool == 4)
                        {
                            XpathAlignment model = new XpathAlignment();
                            model.LearnModel();
                            var watch = Stopwatch.StartNew();
                            runres = model.RunOnTestSet();
                            watch.Stop();
                            totalTime += watch.ElapsedMilliseconds;
                        }
                        else
                        {
                            SVM model = new SVM();
                            model.LearnModel();
                            var watch = Stopwatch.StartNew();
                            runres = model.RunOnTestSet();
                            watch.Stop();
                            totalTime += watch.ElapsedMilliseconds;
                        }

                        HashSet<HtmlNode> spos = new HashSet<HtmlNode>(DomPool.TESTTargetNodes.Intersect(runres));
                        HashSet<HtmlNode> sposprecision = new HashSet<HtmlNode>(DomPool.TESTTargetNodesPrecision.Intersect(runres));

                        foreach (var entry in DomPool.docsAndNames)
                        {
                            if (DomPool.trainingDocsNames.Contains(entry.Key))
                            {
                                continue;
                            }
                            HashSet<HtmlNode> docNodes = new HashSet<HtmlNode>(entry.Value.SelectNodes("//*"));
                            HashSet<HtmlNode> currspos = new HashSet<HtmlNode>(spos.Intersect(docNodes));
                            HashSet<HtmlNode> currrunres = new HashSet<HtmlNode>(runres.Intersect(docNodes));
                            HashSet<HtmlNode> currsposprecision = new HashSet<HtmlNode>(sposprecision.Intersect(docNodes));
                            HashSet<HtmlNode> currTargetNodes = new HashSet<HtmlNode>(DomPool.TESTTargetNodes.Intersect(docNodes));

                            double currSiteAccuracy = currsposprecision.Count() / ((double)currrunres.Count());
                            double currSiteRecall = currspos.Count() / ((double)currTargetNodes.Count());
                            if (((double)currrunres.Count()) > 0)
                            {
                                SiteTotalPrecision[entry.Key] = SiteTotalPrecision[entry.Key] + currSiteAccuracy;
                                SiteTotalRecall[entry.Key] = SiteTotalRecall[entry.Key] + currSiteRecall;
                            }
                            SiteTotalTests[entry.Key] = SiteTotalTests[entry.Key] + 1;
                        }

                        double currAccuracy = sposprecision.Count() / ((double)runres.Count());
                        double currRecall = spos.Count() / ((double)DomPool.TESTTargetNodes.Count());
                        if (runres.Count() > 0)
                        {
                            totalAccuracy = totalAccuracy + currAccuracy;
                            totalRecall = totalRecall + currRecall;
                        }
                    }

                    totalAccuracy = totalAccuracy / subsetsIndexes.Count();
                    totalRecall = totalRecall / subsetsIndexes.Count();
                    Console.WriteLine("########## Results " + tools[tool] + " for i=" + i + "##########");
                    Console.WriteLine("+++++++++ Detailed Results for i=" + i + "++++++++++#");

                    double count = 0;
                    double totalSumPrecision = 0;
                    double totalSumRecall = 0;
                    double avgRecall = 0;
                    double avgPrecision = 0;
                    double avgFscore = 0;
                    double numPrecision = 0;
                    foreach (string site in DomPool.allDocsNames)
                    {
                        if (SiteTotalTests[site] < 1)
                        {
                            SiteTotalTests[site]++;
                        }
                        else
                        {
                            numPrecision++;
                        }
                        double sitePrecision = SiteTotalPrecision[site] / SiteTotalTests[site];
                        double siteRecall = SiteTotalRecall[site] / SiteTotalTests[site];
                        double siteFscore = 2 * (sitePrecision * siteRecall) / (sitePrecision + siteRecall);
                        if (siteRecall == 0 && sitePrecision == 0)
                        {
                            siteFscore = 0;
                        }
                        count++;
                        avgRecall = avgRecall + siteRecall;
                        avgPrecision = avgPrecision + sitePrecision;
                        avgFscore = avgFscore + siteFscore;
                        Console.WriteLine(">" + site + ": Precision:" + sitePrecision + " , Recall:" + siteRecall + ", F-score:" + siteFscore);
                    }

                    Console.WriteLine("++++++++++++++++Total+++++++++++++++++");
                    avgRecall = avgRecall / count;
                    avgPrecision = avgPrecision / numPrecision;
                    avgFscore = avgFscore / count;
                    Console.WriteLine("Recall:" + avgRecall);
                    Console.WriteLine("Precision:" + avgPrecision);
                    Console.WriteLine("F-score:" + avgFscore);
                    Console.WriteLine("Time:" + totalTime);
                }
            }
        }
    }
    Console.ReadLine();
}
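The per-site metrics accumulated in RunTest reduce to standard precision, recall, and F-score over node sets. The standalone helper below shows the same arithmetic on plain HashSet&lt;HtmlNode&gt; values; the name Score is illustrative and not part of the benchmark code. Requires System.Linq and HtmlAgilityPack for HtmlNode.

// Sketch: precision/recall/F-score for a returned node set, as computed in RunTest.
public static (double precision, double recall, double fscore) Score(
    HashSet<HtmlNode> returned, HashSet<HtmlNode> target)
{
    var truePositives = new HashSet<HtmlNode>(returned.Intersect(target));

    double precision = returned.Count == 0 ? 0 : truePositives.Count / (double)returned.Count;
    double recall    = target.Count == 0 ? 0 : truePositives.Count / (double)target.Count;
    double fscore    = (precision + recall) == 0
        ? 0
        : 2 * precision * recall / (precision + recall);

    return (precision, recall, fscore);
}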
public static void PorcessSites(List<SiteInfo> siteinfos)
{
    foreach (SiteInfo si in siteinfos)
    {
        try
        {
            int leftPages = pagesNum * si.attributeExtraction.Keys.Count();
            List<HtmlNode> sitePages = new List<HtmlNode>(0);

            if (!SiteDocuments.ContainsKey(si.SiteName))
            {
                SiteDocuments[si.SiteName] = new Dictionary<string, Dictionary<String, HtmlNode>>();
                SiteLinks[si.SiteName] = new Dictionary<string, HashSet<String>>();
                foreach (String attrName in si.attributeExtraction.Keys)
                {
                    SiteDocuments[si.SiteName][attrName] = new Dictionary<string, HtmlNode>();
                    SiteLinks[si.SiteName][attrName] = new HashSet<string>();
                }
            }

            //Download all URLs
            foreach (String startURL in si.StartURLs)
            {
                try
                {
                    String currURL = startURL;
                    while (currURL != null)
                    {
                        HtmlDocument doc = new HtmlDocument();
                        var currHTML = URLDownloader.GetHtmlOfURL(currURL);
                        doc.LoadHtml(currHTML);

                        if (!String.IsNullOrEmpty(si.PageExtractionXpath))
                        {
                            var links = doc.DocumentNode.SelectNodes(si.PageExtractionXpath);
                            foreach (HtmlNode lnk in links)
                            {
                                try
                                {
                                    String pageLink = lnk.Attributes["href"].Value;
                                    // var htmlstr = URLDownloader.GetHtmlOfURL(URLDownloader.UrlFixIfRelative(pageLink, currURL));
                                    foreach (string attr in si.attributeExtraction.Keys)
                                    {
                                        var newURL = URLDownloader.UrlFixIfRelative(pageLink, currURL);
                                        if (SiteLinks[si.SiteName][attr].Contains(newURL))
                                        {
                                            continue;
                                        }
                                        //HtmlDocument innerDoc = new HtmlDocument();
                                        //innerDoc.LoadHtml(htmlstr);
                                        //SiteDocuments[si.SiteName][attr].Add(pageLink, innerDoc.DocumentNode);
                                        SiteLinks[si.SiteName][attr].Add(newURL);
                                        if (--leftPages <= 0)
                                        {
                                            break;
                                        }
                                    }
                                    if (leftPages <= 0)
                                    {
                                        break;
                                    }
                                }
                                catch (Exception e)
                                {
                                    // Console.WriteLine(e.StackTrace);
                                }
                            }
                        }
                        else
                        {
                            foreach (string attr in si.attributeExtraction.Keys)
                            {
                                try
                                {
                                    if (SiteLinks[si.SiteName][attr].Contains(currURL))
                                    {
                                        continue;
                                    }
                                    // HtmlDocument innerDoc = new HtmlDocument();
                                    // innerDoc.LoadHtml(currHTML);
                                    // SiteDocuments[si.SiteName][attr].Add(currURL, innerDoc.DocumentNode);
                                    SiteLinks[si.SiteName][attr].Add(currURL);
                                    if (--leftPages <= 0)
                                    {
                                        break;
                                    }
                                }
                                catch
                                {
                                    break;
                                }
                            }
                        }

                        if (leftPages <= 0)
                        {
                            break;
                        }

                        //get next page
                        String nextLink = null;
                        try
                        {
                            nextLink = HttpUtility.HtmlDecode(doc.DocumentNode.SelectSingleNode(si.NextPageXPath).Attributes["href"].Value);
                        }
                        catch
                        {
                            nextLink = null;
                        }
                        if (nextLink != null)
                        {
                            nextLink = URLDownloader.UrlFixIfRelative(nextLink, currURL);
                        }
                        // Stop paging when there is no next link or it points back to the current page.
                        if (nextLink == null || currURL.ToLower().Trim().Equals(nextLink.ToLower().Trim()))
                        {
                            break;
                        }
                        currURL = nextLink;
                    }
                }
                catch { }
            }

            foreach (String attr in si.attributeExtraction.Keys)
            {
                var trainingkeys = new HashSet<String>(SiteLinks[si.SiteName][attr].Take(5)); //new HashSet<String>(SiteDocuments[si.SiteName][attr].Keys.Take(5));
                var trainingDic = new Dictionary<String, HtmlNode>(); //SiteDocuments[si.SiteName][attr].Where(x => trainingkeys.Contains(x.Key)).ToDictionary(kv => kv.Key, kv => kv.Value);
                foreach (String lnk in trainingkeys)
                {
                    trainingDic.Add(lnk, GetHtmlNode(lnk));
                }
                var testDic = SiteDocuments[si.SiteName][attr].Where(x => !trainingkeys.Contains(x.Key)).ToDictionary(kv => kv.Key, kv => kv.Value);

                foreach (var lnk in trainingDic.Keys)
                {
                    HtmlNode adoc = trainingDic[lnk];
                    try
                    {
                        var gt = adoc.SelectNodes(si.attributeExtraction[attr]);
                        if (gt != null)
                        {
                            foreach (var targetNode in gt)
                            {
                                //Console.Write(":");
                                if (targetNode.Attributes.Contains("userselected"))
                                {
                                    targetNode.SetAttributeValue("userselected", "yes");
                                }
                                else
                                {
                                    targetNode.Attributes.Add("userselected", "yes");
                                }
                            }
                        }
                    }
                    catch { }

                    MD5 md5 = MD5.Create();
                    if (!File.Exists("huge/" + si.SiteName + "/training/" + attr + "/" + getMD5(lnk) + ".html"))
                    {
                        Directory.CreateDirectory("huge/" + si.SiteName + "/training/" + attr);
                        File.WriteAllText("huge/" + si.SiteName + "/training/" + attr + "/" + getMD5(lnk) + ".html", adoc.InnerHtml);
                    }
                }

                DomPool.LoadDocuments(trainingDic);
                //DomPool.LoadTestDocuments();
                DomPool.Initiate(new HashSet<string>(trainingDic.Keys));
                DomPool.ExtractAllFeatures();

                // Run code
                DecisionNode dn = new DecisionNode();
                dn.InitialNodeSet = new HashSet<HtmlNode>(DomPool.TargetNodes.Union(DomPool.NonTargetNodes));
                dn.SelectedNegative = new HashSet<HtmlNode>(DomPool.NonTargetNodes.Except(DomPool.TargetNodesPrecision));
                dn.SelectedPositive = new HashSet<HtmlNode>(DomPool.TargetNodes);
                dn.FeatureSet = new HashSet<Feature>();
                dn.CalculateEntropy();
                DecisionTreeLearning.RecursiveTreeImprovement(dn);

                var xpath = XpathTools.GenerateAForgivingXpath(dn);
                var xpathNonForgiving = XpathTools.DecisionTreeToXpath(dn, new HashSet<Feature>(), 1);
                xpathNonForgiving = "//*" + (xpathNonForgiving.Equals("") ? "" : ("[" + xpathNonForgiving + "]"));

                XpathAlignment model = new XpathAlignment();
                model.LearnModel();
                var alignmentXpath = model.xpath;

                CheckOnTest(new HashSet<string>(SiteLinks[si.SiteName][attr].Except(trainingkeys)), xpath, si.attributeExtraction[attr], si.SiteName, attr, "ForgivingXP");
                CheckOnTest(new HashSet<string>(SiteLinks[si.SiteName][attr].Except(trainingkeys)), alignmentXpath, si.attributeExtraction[attr], si.SiteName, attr, "Alignment");
            }
        }
        finally
        {
        }
        SiteDocuments.Remove(si.SiteName);
    }
    Console.ReadLine();
}