/// <summary>
/// Refreshes the node metadata in all trees in the forest.
/// </summary>
public void RefreshMetadata()
{
    _leafCount = 0;
    _levelCount = 0;
    _testCounts = new Dictionary<string, int>();
    for (byte i = 0; i < _numTrees; i++)
    {
        DecisionTree<T, D> tree = _trees[i];
        // Re-number each tree's leaves so leaf labels are unique across the whole forest;
        // SetTreeLabel returns the next free label index.
        _leafCount = tree.SetTreeLabel(i, _leafCount);
        _levelCount = Math.Max(_levelCount, tree.LevelCount);
        // Accumulate per-test usage counts from this tree into the forest totals.
        foreach (KeyValuePair<string, int> pair in tree.TestCounts)
        {
            // TryGetValue avoids the ContainsKey + indexer double lookup;
            // 'current' defaults to 0 when the key is not yet present.
            _testCounts.TryGetValue(pair.Key, out int current);
            _testCounts[pair.Key] = current + pair.Value;
        }
    }
}
/// <summary>
/// Trains a decision forest from <paramref name="splits"/> based on the provided parameters using the depth first algorithm.
/// </summary>
/// <param name="numTrees">Number of trees in the forest</param>
/// <param name="splits">Data splits to use when training the tree.</param>
/// <param name="factory">The feature factory</param>
/// <param name="numFeatures">The number of features to try for each node</param>
/// <param name="numThresholds">The number of thresholds to try for each node</param>
/// <param name="labelNames">The names for the labels</param>
/// <param name="labelWeights">An array of weights for each label</param>
/// <returns>The trained forest</returns>
public static DecisionForest<T, D> ComputeDepthFirst(
    int numTrees,
    List<T>[] splits,
    IFeatureFactory<T, D> factory,
    int numFeatures,
    int numThresholds,
    string[] labelNames,
    float[] labelWeights
)
{
    int numLabels = labelNames.Length;
    DecisionTree<T, D>[] trees = new DecisionTree<T, D>[numTrees];
    int count = 0;
    // Sort each split when the data points are comparable. Guard against an
    // empty first split, which would otherwise throw on splits[0][0].
    if (splits.Length > 0 && splits[0].Count > 0 && splits[0][0] is IComparable<T>)
    {
        foreach (List<T> split in splits)
        {
            split.Sort();
        }
    }
    for (int index = 0; index < numTrees; index++)
    {
        // Tree labels are bytes; the cast preserves the original wrap-around
        // behavior if numTrees ever exceeds byte.MaxValue.
        byte i = (byte)index;
        // Round-robin assignment of data splits to trees.
        int split = index % splits.Length;
        UpdateManager.WriteLine(string.Format("Training tree {0} of {1}...", index + 1, numTrees));
        trees[index] = DecisionTree<T, D>.ComputeDepthFirst(splits[split], factory, numFeatures, numThresholds, numLabels, labelWeights);
        trees[index].LabelCount = numLabels;
        // Assign globally-unique leaf labels across the forest; SetTreeLabel
        // returns the next free label index.
        count = trees[index].SetTreeLabel(i, count);
        UpdateManager.WriteLine("\ndone");
    }
    UpdateManager.WriteLine("Training complete");
    return new DecisionForest<T, D>(trees, labelNames);
}
/// <summary>
/// Classifies each point from <paramref name="image"/> and tracks which nodes it visits.
/// </summary>
/// <param name="tree">The tree used for the computation</param>
/// <param name="image">Image to add to the tree</param>
/// <param name="mode">Mode to use when sampling the image</param>
public static void Fill<T>(this DecisionTree<ImageDataPoint<T>, T[]> tree, LabeledImage<T> image, BackgroundSampleMode mode)
{
    // Sample the image into data points and push them through the tree.
    tree.Fill(image.CreateAllDataPoints(mode));
}
/// <summary>
/// Classifies each pixel of <paramref name="image"/> and produces a corresponding <see cref="T:LabelImage" />. The maximum likelihood label
/// is chosen at each pixel.
/// </summary>
/// <param name="tree">The tree used for the computation</param>
/// <param name="image">Image to classify</param>
/// <returns>A label image with all of the classifications</returns>
public static LabelImage Classify<T>(this DecisionTree<ImageDataPoint<T>, T[]> tree, IMultichannelImage<T> image)
{
    // Compute per-pixel label distributions, then collapse each to its
    // maximum-likelihood label.
    var distributions = tree.ClassifySoft(image);
    return distributions.ToLabelImage();
}