Code example #1
        /// <summary>
        /// Builds a test classifier by searching for the best
        /// threshold value.
        /// FeatureIndex is the index number of the feature.
        /// </summary>
        public static TestWeakClassifier Train(TestImage[] Tests, double ValidWeight,
		                                       FeatureValues Feature)
        {
            if (Feature.Values == null) // Uncached values
                Feature.Values = FeatureValue.ComputeAllValuesSorted(Tests, Feature.Feature);

            // With the initial threshold, the positive-polarity classifier
            // always answers "not valid", so its error is the total weight
            // of the valid tests.
            var positiveError = ValidWeight;

            // The feature's values are sorted in ascending order;
            // start with the lowest value as the initial threshold
            var best = new TestWeakClassifier(Feature, Feature.Values[0].Value, 1, positiveError);

            // Sweep the threshold over the values and keep the one with the
            // lowest weighted error
            for (var iTest = 0; iTest < Feature.Values.Length; iTest++) {
                if (Tests[Feature.Values[iTest].TestIndex].Valid) {
                    positiveError -= Tests[Feature.Values[iTest].TestIndex].Weight;

                    if (positiveError < best.Errors) {
                        best = new TestWeakClassifier(Feature, Feature.Values[iTest].Value + 1,
                                                      1, positiveError);
                    }
                } else {
                    positiveError += Tests[Feature.Values[iTest].TestIndex].Weight;

                    // With the weights normalized to sum to 1, the error of the
                    // opposite-polarity classifier is the complement of the
                    // positive one's error
                    var negativeError = 1.0 - positiveError;

                    if (negativeError < best.Errors) {
                        best = new TestWeakClassifier(Feature, Feature.Values[iTest].Value - 1,
                                                      -1, negativeError);
                    }
                }
            }

            return best;
        }
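Train only chooses the threshold and the polarity; the actual decision is made later by Check (used in code example #2). The project's implementation of Check is not shown here, but as a point of reference, here is a minimal, hypothetical sketch of the standard Viola-Jones weak-classifier rule that such a threshold/polarity pair typically encodes. The field names Threshold and Polarity are assumptions, not the project's API.

        // Hypothetical sketch only, assuming the classifier stores the selected
        // Threshold and a Polarity of +1 or -1; this is not the project's code.
        public bool Check(double featureValue)
        {
            // Standard Viola-Jones weak decision: the sample is considered
            // valid when polarity * value < polarity * threshold.
            return Polarity * featureValue < Polarity * Threshold;
        }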
Code example #2
        /// <summary>
        /// Builds a detection tree from a directory containing valid
        /// and invalid test images.
        /// </summary>
        public static StrongClassifier Train(string TestsDir)
        {
            Console.WriteLine("Init trainer ...");

            // Load the default weights and the integral images, and compute
            // the feature values for each test image in the tests directory.
            // Training images are resized to the detection window size.
            Console.WriteLine("Load trainer tests ...");
            var start = DateTime.Now;
            var testsSet = LoadTestsSet(TestsDir);
            var tests = testsSet.Item1;
            var featuresValues = testsSet.Item2;

            var nCachedFeatures = featuresValues.Count((feature) => feature.Values != null);

            Console.WriteLine("{0} tests loaded in {1} secs ({2}% cached)", tests.Length,
                              (DateTime.Now - start).TotalSeconds,
                              nCachedFeatures * 100 / featuresValues.Length);

            Console.WriteLine("Ok. Let's start the neurons lazer powered firing machine ...");

            WeakClassifier[] classifiers = new WeakClassifier[Config.LearnPass];
            var iPass = 1;
            while (iPass <= Config.LearnPass) {
                start = DateTime.Now;
                Console.WriteLine("{0}/{1} trainer pass ...", iPass, Config.LearnPass);

                // Normalize the test weights so that they form a probability
                // distribution (i.e. they sum to 1)
                var weightsSum = tests.Sum((test) => test.Weight);
                var validWeight = 0.0;
                for (var iTest = 0; iTest < tests.Length; iTest++) {
                    tests[iTest].Weight = tests[iTest].Weight / weightsSum;

                    if (tests[iTest].Valid)
                        validWeight += tests[iTest].Weight;
                }

                // Select the feature with the lowest weighted error for the
                // new set of test weights
                TestWeakClassifier best = new TestWeakClassifier(featuresValues[0], 0, 1, double.MaxValue);
                var bestLock = new object();
                Parallel.ForEach(featuresValues, (feature) => {
                    var newClassifier = TestWeakClassifier.Train(tests, validWeight, feature);

                    // "best" is shared between the parallel iterations, so it
                    // must be compared and updated under a lock
                    lock (bestLock) {
                        if (best.Errors > newClassifier.Errors)
                            best = newClassifier;
                    }
                });

                Console.WriteLine("New weak classifier selected in {0} secs (error score: {1})",
                                  (DateTime.Now - start).TotalSeconds, best.Errors);
                Console.WriteLine("X: {0} Y: {1} - Width : {2} Height : {3}",
                                  best.Feature.Feature.Frame.TopLeft.X,
                                  best.Feature.Feature.Frame.TopLeft.Y,
                                  best.Feature.Feature.Frame.Width,
                                  best.Feature.Feature.Frame.Height);

                // Update the weights
                var beta = best.Errors / (1.0 - best.Errors);
                if (beta < 1e-8) // Clamp beta so that alpha = ln(1/beta) stays below ~18.4
                    beta = 1e-8;

                // Reduce the weights of the correctly classified tests
                foreach (var featureValue in best.Feature.Values) {
                    if (best.Check(featureValue.Value) == tests[featureValue.TestIndex].Valid)
                        tests[featureValue.TestIndex].Weight *= beta;
                }

                // Add the new weak classifier to the strong classifier
                var alpha = Math.Log(1.0 / beta); // Weak classifier weight inside the strong classifier
                classifiers[iPass - 1] = best.GetClassifier(alpha);

                iPass += 1;
            }

            Console.WriteLine("Detector's brain ready to go");

            return new StrongClassifier(classifiers);
        }
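For context, the alpha computed at each pass is the weak classifier's voting weight inside the final strong classifier. In the standard Viola-Jones scheme, a detection window is accepted when the alpha-weighted sum of positive weak votes reaches half of the total alpha. The sketch below is illustrative only and does not use this project's StrongClassifier API; the parallel alphas/votes arrays are an assumption made to keep it self-contained.

        // Illustrative only: the standard Viola-Jones strong-classifier vote.
        // alphas[i] is the i-th weak classifier's weight (ln(1/beta)) and
        // votes[i] its boolean decision on the current detection window.
        public static bool Vote(double[] alphas, bool[] votes)
        {
            var alphaSum = 0.0;
            var voteSum = 0.0;

            for (var i = 0; i < alphas.Length; i++) {
                alphaSum += alphas[i];
                if (votes[i])
                    voteSum += alphas[i];
            }

            // Accept when the weighted vote reaches half of the total weight
            return voteSum >= 0.5 * alphaSum;
        }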