public StrongClassifier(WeakClassifier[] Classifiers)
        {
            this.Classifiers = Classifiers;

            // GlobalAlpha is the sum of the weak classifiers' weights (alphas).
            this.GlobalAlpha = this.Classifiers.Sum(classifier => classifier.Alpha);
        }
        /// <summary>
        /// Builds a strong classifier from a directory containing
        /// valid and invalid test images.
        /// </summary>
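        /// <remarks>
        /// The training loop follows an AdaBoost scheme (as in Viola-Jones):
        /// each pass selects the weak classifier with the lowest weighted error,
        /// then down-weights the samples it classifies correctly so later passes
        /// focus on the harder examples.
        /// </remarks>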
        public static StrongClassifier Train(string TestsDir)
        {
            Console.WriteLine("Init trainer ...");

            // Load the default weights and integral images, and compute the feature
            // values for each test image in the tests directory.
            // Training images are resized to the window size.
            Console.WriteLine("Load trainer tests ...");
            var start = DateTime.Now;
            var testsSet = LoadTestsSet(TestsDir);
            var tests = testsSet.Item1;
            var featuresValues = testsSet.Item2;
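            // tests holds the weighted training samples (with their Valid flag);
            // featuresValues holds the per-feature values over those samples
            // (a feature's Values is apparently null when it was not cached).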

            var nCachedFeatures = featuresValues.Count(feature => feature.Values != null);

            Console.WriteLine("{0} tests loaded in {1} secs ({2}% cached)", tests.Length,
                              (DateTime.Now - start).TotalSeconds,
                              nCachedFeatures * 100 / featuresValues.Length);

            Console.WriteLine("Ok. Let's start the laser-powered neuron firing machine ...");

            WeakClassifier[] classifiers = new WeakClassifier[Config.LearnPass];
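            // One weak classifier is selected per training pass (Config.LearnPass passes in total).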
            var iPass = 1;
            while (iPass <= Config.LearnPass) {
                start = DateTime.Now;
                Console.WriteLine("{0}/{1} trainer pass ...", iPass, Config.LearnPass);

                // Normalize the weights of the images so that they form
                // a probability distribution
                var weightsSum = tests.Sum(test => test.Weight);
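                // Total weight of the positive (valid) samples; presumably used by
                // TestWeakClassifier.Train to bound the threshold error.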
                var validWeight = 0.0;
                for (var iTest = 0; iTest < tests.Length; iTest++) {
                    tests[iTest].Weight = tests[iTest].Weight / weightsSum;

                    if (tests[iTest].Valid)
                        validWeight += tests[iTest].Weight;
                }

                // Select the feature with the lowest error level for the new
                // set of tests' weights
                TestWeakClassifier best = new TestWeakClassifier(featuresValues[0], 0, 1, double.MaxValue);
                var bestLock = new object();
                Parallel.ForEach(featuresValues, (feature) => {
                    var newClassifier = TestWeakClassifier.Train(tests, validWeight, feature);

                    // "best" is shared between the parallel iterations, so guard the
                    // compare-and-update with a lock to avoid lost updates.
                    lock (bestLock) {
                        if (best.Errors > newClassifier.Errors)
                            best = newClassifier;
                    }
                });

                Console.WriteLine("New weak classifier selected in {0} secs (error score: {1})",
                                  (DateTime.Now - start).TotalSeconds, best.Errors);
                Console.WriteLine("X: {0} Y: {1} - Width : {2} Height : {3}",
                                  best.Feature.Feature.Frame.TopLeft.X,
                                  best.Feature.Feature.Frame.TopLeft.Y,
                                  best.Feature.Feature.Frame.Width,
                                  best.Feature.Feature.Frame.Height);

                // Update the weights
                var beta = best.Errors / (1.0 - best.Errors);
                if (beta < 1e-8) // Clamp beta so alpha = ln(1/beta) stays below ~18.4
                    beta = 1e-8;
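                // AdaBoost weighting: beta = error / (1 - error), and the vote weight
                // computed below is alpha = ln(1 / beta) = ln((1 - error) / error), so
                // more accurate weak classifiers get a larger say in the final decision.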

                // Reduce the weights of the correctly classified tests
                foreach (var featureValue in best.Feature.Values) {
                    if (best.Check(featureValue.Value) == tests[featureValue.TestIndex].Valid)
                        tests[featureValue.TestIndex].Weight *= beta;
                }
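                // Correctly classified samples are down-weighted (beta < 1 whenever the
                // weighted error is below 0.5), so the next pass concentrates on the
                // samples this classifier still gets wrong.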

                // Add the new weak classifier to the strong classifier
                var alpha = Math.Log(1.0 / beta); // Weak classifier weight inside the strong classifier
                classifiers[iPass - 1] = best.GetClassifier(alpha);

                iPass += 1;
            }

            Console.WriteLine("Detector's brain ready to go");

            return new StrongClassifier(classifiers);
        }
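
        // Usage sketch (hypothetical call site, not part of this class; the path is a
        // placeholder and it assumes Classifiers and GlobalAlpha are readable by the caller):
        //
        //   var classifier = StrongClassifier.Train("tests");
        //   Console.WriteLine("{0} weak classifiers, global alpha = {1}",
        //                     classifier.Classifiers.Length, classifier.GlobalAlpha);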