public void Train(double[][,] images, List<WeakClassifier> weakClassifiers, bool[] results, int featureCount)
{
    // Training requires at least one positive and one negative sample.
    if (results.Count(r => r) == 0) { return; }
    if (results.Count(r => !r) == 0) { return; }

    var integralImages = IntegrateImages(images);

    WeakClassifiers = new List<WeakClassifier>();
    Betas = new List<double>();
    Weights = new List<double>(); // Strong classifier weights

    InitializeWeights(results);

    Console.WriteLine("Training started...");

    // Boosting rounds: select one weak classifier per round.
    for (int featureIndex = 0; featureIndex < featureCount; featureIndex++)
    {
        // Normalize the sample weights so they form a probability distribution
        var weightSum = ImagesWeights[featureIndex].Sum();
        for (int i = 0; i < images.Length; i++)
        {
            ImagesWeights[featureIndex][i] /= weightSum;
        }

        // Train each weak classifier in parallel
        weakClassifiers.AsParallel().ForAll(wc =>
        {
            wc.Train(ref integralImages, ref results, ImagesWeights[featureIndex]);
        });

        // Choose the weak classifier with the lowest weighted error
        var minFeature = weakClassifiers.OrderBy(wc => wc.Error).First();

        // Save the chosen weak classifier and remove it from the candidate pool
        WeakClassifiers.Add(minFeature);
        weakClassifiers.Remove(minFeature);

        // Calculate beta and the classifier's voting weight (alpha = log(1 / beta))
        Betas.Add(minFeature.Error / (1 - minFeature.Error));
        Weights.Add(Math.Log(1.0 / Betas[featureIndex]));

        // Update the sample weights: correctly classified samples are down-weighted
        // by beta; misclassified samples keep their weight
        ImagesWeights.Add(new double[integralImages.Length]);
        for (int i = 0; i < integralImages.Length; i++)
        {
            var featureResult = minFeature.Detect(integralImages[i]);
            if (featureResult == results[i])
            {
                ImagesWeights[featureIndex + 1][i] = ImagesWeights[featureIndex][i] * Betas[featureIndex];
            }
            else
            {
                ImagesWeights[featureIndex + 1][i] = ImagesWeights[featureIndex][i];
            }
        }

        Console.WriteLine("Feature #{0}. Error: {1}", featureIndex + 1, minFeature.Error);
    }

    var scores = CalcScores(integralImages, results);
    DetermineThreshold(ref scores);
    Statistics(images, results);

    Console.WriteLine("Training finished.");
}
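Train relies on several helpers (IntegrateImages, InitializeWeights, CalcScores, DetermineThreshold, Statistics) that are not shown in this section. The sketch below shows one plausible way IntegrateImages and InitializeWeights could be implemented, assuming the methods live in the same class, that ImagesWeights is a List<double[]> field, and that the usual System, System.Collections.Generic and System.Linq usings are in scope. It follows the standard Viola-Jones weight initialization of 1/(2l) for positives and 1/(2m) for negatives; the actual implementations in the project may differ.

// Hypothetical sketches of the helpers used by Train; the real implementations may differ.

private double[][,] IntegrateImages(double[][,] images)
{
    // Build a summed-area (integral) image for each input so that any
    // rectangular pixel sum can later be read in constant time.
    var integrals = new double[images.Length][,];
    for (int n = 0; n < images.Length; n++)
    {
        int height = images[n].GetLength(0);
        int width = images[n].GetLength(1);
        var integral = new double[height, width];
        for (int y = 0; y < height; y++)
        {
            double rowSum = 0.0;
            for (int x = 0; x < width; x++)
            {
                rowSum += images[n][y, x];
                integral[y, x] = rowSum + (y > 0 ? integral[y - 1, x] : 0.0);
            }
        }
        integrals[n] = integral;
    }
    return integrals;
}

private void InitializeWeights(bool[] results)
{
    // Standard Viola-Jones initialization: 1/(2l) for positives, 1/(2m) for negatives,
    // where l and m are the numbers of positive and negative samples.
    int positives = results.Count(r => r);
    int negatives = results.Length - positives;
    var weights = new double[results.Length];
    for (int i = 0; i < results.Length; i++)
    {
        weights[i] = results[i] ? 1.0 / (2.0 * positives) : 1.0 / (2.0 * negatives);
    }
    ImagesWeights = new List<double[]> { weights };
}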
public void AddWeakClassifier(double[][,] images, bool[] results, List<WeakClassifier> weakClassifiers)
{
    // Training requires at least one positive and one negative sample.
    if (results.Count(r => r) == 0) { return; }
    if (results.Count(r => !r) == 0) { return; }

    // Lazily initialize the classifier state on the first call.
    if (ImagesWeights == null) { ImagesWeights = new List<double[]>(); }
    if (Weights == null) { Weights = new List<double>(); }
    if (WeakClassifiers == null) { WeakClassifiers = new List<WeakClassifier>(); }
    if (Betas == null) { Betas = new List<double>(); }
    if (ImagesWeights.Count == 0) { InitializeWeights(results); }

    var integralImages = IntegrateImages(images);

    // Normalize the current sample weights so they form a probability distribution
    var weightSum = ImagesWeights.Last().Sum();
    for (int i = 0; i < images.Length; i++)
    {
        ImagesWeights.Last()[i] /= weightSum;
    }

    // Train each weak classifier in parallel
    weakClassifiers.AsParallel().ForAll(wc =>
    {
        wc.Train(ref integralImages, ref results, ImagesWeights.Last());
    });

    // Choose the weak classifier with the lowest weighted error
    var minFeature = weakClassifiers.OrderBy(wc => wc.Error).First();
    if (double.IsNaN(minFeature.Error))
    {
        throw new InvalidOperationException("Error is NaN");
    }

    // Save the chosen weak classifier and remove it from the candidate pool
    WeakClassifiers.Add(minFeature);
    weakClassifiers.Remove(minFeature);

    // Calculate beta and the classifier's voting weight (alpha = log(1 / beta))
    Betas.Add(minFeature.Error / (1 - minFeature.Error));
    Weights.Add(Math.Log(1.0 / Betas.Last()));

    // Update the sample weights: correctly classified samples are down-weighted
    // by beta; misclassified samples keep their weight
    ImagesWeights.Add(new double[integralImages.Length]);
    for (int i = 0; i < integralImages.Length; i++)
    {
        var featureResult = minFeature.Detect(integralImages[i]);
        if (featureResult == results[i])
        {
            ImagesWeights.Last()[i] = ImagesWeights[ImagesWeights.Count - 2][i] * Betas.Last();
        }
        else
        {
            ImagesWeights.Last()[i] = ImagesWeights[ImagesWeights.Count - 2][i];
        }
    }

    var scores = CalcScores(integralImages, results);
    DetermineThreshold(ref scores);

    var detectionRate = 0.0;
    var falsePositiveRate = 0.0;
    Evaluate(integralImages, results, out falsePositiveRate, out detectionRate);

    Console.WriteLine(
        "Weak Classifier #{0} was added. Error: {1} Detection Rate: {2} False Positive Rate: {3}",
        WeakClassifiers.Count,
        Math.Round(minFeature.Error, 5).ToString().PadRight(10),
        Math.Round(detectionRate, 4).ToString().PadRight(10),
        Math.Round(falsePositiveRate, 4).ToString().PadRight(10));
}
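AddWeakClassifier also relies on an Evaluate helper that reports the current detection rate and false-positive rate. The following is a minimal sketch under the assumption that the strong classifier exposes a Threshold field set by DetermineThreshold and a Detect method that takes the weighted vote of the selected weak classifiers; both names are assumptions made for illustration, not part of the code shown above.

// Hypothetical sketch of the evaluation helper; Threshold and Detect are assumed names.

private bool Detect(double[,] integralImage)
{
    // Weighted vote of the selected weak classifiers, compared against the
    // strong-classifier threshold chosen by DetermineThreshold (not shown).
    double score = 0.0;
    for (int t = 0; t < WeakClassifiers.Count; t++)
    {
        if (WeakClassifiers[t].Detect(integralImage))
        {
            score += Weights[t];
        }
    }
    return score >= Threshold;
}

private void Evaluate(double[][,] integralImages, bool[] results, out double falsePositiveRate, out double detectionRate)
{
    // Detection rate = fraction of positives accepted; false-positive rate =
    // fraction of negatives accepted by the current strong classifier.
    int positives = 0, negatives = 0, truePositives = 0, falsePositives = 0;
    for (int i = 0; i < integralImages.Length; i++)
    {
        bool predicted = Detect(integralImages[i]);
        if (results[i])
        {
            positives++;
            if (predicted) { truePositives++; }
        }
        else
        {
            negatives++;
            if (predicted) { falsePositives++; }
        }
    }
    detectionRate = positives > 0 ? (double)truePositives / positives : 0.0;
    falsePositiveRate = negatives > 0 ? (double)falsePositives / negatives : 0.0;
}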