Example #1
        public object Clone()
        {
            // MemberwiseClone only makes a shallow copy, so the feature-value
            // buffer has to be duplicated explicitly to keep the copy independent.
            WeakClassifier newWeak = (WeakClassifier)this.MemberwiseClone();

            newWeak._featureValues = (MyFloat[])this._featureValues.Clone();
            return(newWeak);
        }
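The example above relies on MemberwiseClone being a shallow copy, so the reference-typed feature-value array must be cloned separately. A minimal, self-contained sketch of the same shallow-versus-deep copy pattern; the ScoreHolder class and its members are hypothetical and not part of the library above.

using System;

// Illustrative stand-in class; not part of the library above.
class ScoreHolder : ICloneable
{
    public float Threshold;     // value type: copied correctly by MemberwiseClone
    public float[] Scores;      // reference type: must be cloned explicitly

    public object Clone()
    {
        ScoreHolder copy = (ScoreHolder)this.MemberwiseClone();
        copy.Scores = (float[])this.Scores.Clone();   // deep-copy the buffer
        return copy;
    }

    static void Main()
    {
        ScoreHolder a = new ScoreHolder { Threshold = 0.5f, Scores = new float[] { 1f, 2f } };
        ScoreHolder b = (ScoreHolder)a.Clone();
        b.Scores[0] = 99f;
        Console.WriteLine(a.Scores[0]);   // still 1: the two copies are independent
    }
}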
Example #2
        internal static WeakClassifier Load(float threshold, bool posLargeThanThreshold, float weight, System.Drawing.Rectangle[] rects, int[] weights, ColorType colorType)
        {
            HaarFeature    feature = new HaarFeature(0, rects, weights, colorType);
            WeakClassifier weak    = new WeakClassifier(feature);

            weak._threshold             = threshold;
            weak._posLargeThanThreshold = posLargeThanThreshold;
            weak._weight = weight;

            return(weak);
        }
Example #3
        private void UpdateWeights(WeakClassifier bestClassifier, int numPos, int numNeg, float[] sampleWeight)
        {
            int    count = numNeg + numPos;
            double err   = 0;
            int    i;

            bool[] classifyResult = new bool[count];
            bool   result;

            for (i = 0; i < numPos; i++)
            {
                result = bestClassifier.Predict(i);
                if (result == false)
                {
                    err += sampleWeight[i];    // accumulate the weights of misclassified samples
                }
                classifyResult[i] = result;
            }
            for (i = numPos; i < count; i++)
            {
                result = bestClassifier.Predict(i);
                if (result == true)
                {
                    err += sampleWeight[i];    // accumulate the weights of misclassified samples
                }
                classifyResult[i] = !result;
            }

            // Adjust the sample weights. The clamping below follows the reference logRatio helper:
            // double logRatio( double val )
            // {
            //     const double eps = 1e-5;
            //
            //     val = max( val, eps );
            //     val = min( val, 1. - eps );
            //     return log( val/(1. - val) );
            // }
            const double eps = (double)1e-5;
            double       val = err;

            val = Math.Max(val, eps);
            val = Math.Min(val, 1 - eps);
            err = val;
            MyFloat factor = (MyFloat)(err / (1 - err));

            for (int indexSample = 0; indexSample < count; indexSample++)
            {
                if (classifyResult[indexSample])
                {
                    sampleWeight[indexSample] *= factor;
                }
            }
            bestClassifier.Weight = -(MyFloat)Math.Log10(factor);  // set the weak classifier weight α
        }
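UpdateWeights clamps the weighted error to (eps, 1 - eps), down-weights correctly classified samples by β = err / (1 - err), and assigns the weak classifier the weight α = -log10(β). A small stand-alone sketch of that same update rule; the error value and sample arrays are made-up numbers for illustration only.

using System;

class WeightUpdateDemo
{
    static void Main()
    {
        // Hypothetical weighted error of the best weak classifier in this round.
        const double eps = 1e-5;
        double err = 0.2;
        err = Math.Min(Math.Max(err, eps), 1 - eps);   // clamp to (eps, 1 - eps)

        double beta  = err / (1 - err);                // down-weighting factor (< 1 when err < 0.5)
        double alpha = -Math.Log10(beta);              // weak classifier weight, as in the code above

        // Hypothetical sample weights; true means the sample was classified correctly this round.
        double[] w       = { 0.25, 0.25, 0.25, 0.25 };
        bool[]   correct = { true, true, true, false };

        for (int i = 0; i < w.Length; i++)
        {
            if (correct[i])
            {
                w[i] *= beta;   // correctly classified samples lose weight
            }
        }

        Console.WriteLine("beta={0:F4}, alpha={1:F4}, weights={2}",
                          beta, alpha, string.Join(", ", w));
    }
}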
Example #4
        private static StageClassifier LoadStageClassifier(int id, XmlNode stageNode)
        {
            MyFloat threshold = MyFloat.Parse(stageNode.Attributes["Threshold"].Value);
            int     weakNum   = int.Parse(stageNode.Attributes["ClassifiersNum"].Value);

            WeakClassifier[] weaks = new WeakClassifier[weakNum];
            for (int i = 0; i < weakNum; i++)
            {
                XmlNode weakNode = stageNode.ChildNodes[i];
                weaks[i] = LoadWeakClassifier(weakNode);
            }

            StageClassifier stage = StageClassifier.Load(id, threshold, weaks);

            return(stage);
        }
Example #5
        private static WeakClassifier LoadWeakClassifier(XmlNode weakNode)
        {
            MyFloat   threshold             = MyFloat.Parse(weakNode.Attributes["Threshold"].Value);
            MyFloat   weight                = MyFloat.Parse(weakNode.Attributes["Weight"].Value);
            bool      posLargeThanThreshold = bool.Parse(weakNode.Attributes["PosLargeThanThreshold"].Value);
            ColorType colorType             = ColorType.Null;

            if (weakNode.Attributes["ColorType"] != null)
            {
                string colorValue = weakNode.Attributes["ColorType"].Value;
                if (colorValue.Contains(ColorType.Gray.ToString()))
                {
                    colorType |= ColorType.Gray;
                }
                if (colorValue.Contains(ColorType.Saturation.ToString()))
                {
                    colorType |= ColorType.Saturation;
                }
                if (colorType == ColorType.Null)
                {
                    colorType = ColorType.Gray;
                }
            }
            else
            {
                colorType = ColorType.Gray;
            }

            List <Rectangle> rects   = new List <Rectangle>(4);
            List <int>       weights = new List <int>(4);

            foreach (XmlNode node in weakNode.ChildNodes)
            {
                string[] nums   = node.InnerText.Split(' ');
                int      x      = int.Parse(nums[0]);
                int      y      = int.Parse(nums[1]);
                int      width  = int.Parse(nums[2]);
                int      height = int.Parse(nums[3]);
                int      ww     = int.Parse(nums[4]);
                rects.Add(new Rectangle(x, y, width, height));
                weights.Add(ww);
            }
            return(WeakClassifier.Load(threshold, posLargeThanThreshold, weight, rects.ToArray(), weights.ToArray(), colorType));
        }
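Judging from the attribute and child-node reads above, a weak classifier node carries Threshold, Weight, PosLargeThanThreshold and an optional ColorType attribute, plus one child element per rectangle whose inner text is "x y width height weight". A hedged sketch of parsing such a fragment; the element names and all values are invented for illustration and the real file layout may differ.

using System;
using System.Xml;

class WeakNodeShapeDemo
{
    static void Main()
    {
        // Assumed node shape, inferred from the parser above.
        string xml =
            "<Weak Threshold=\"0.0042\" Weight=\"1.25\" PosLargeThanThreshold=\"true\" ColorType=\"Gray\">" +
            "<Rect>6 4 12 8 -1</Rect>" +
            "<Rect>6 8 12 4 2</Rect>" +
            "</Weak>";

        XmlDocument doc = new XmlDocument();
        doc.LoadXml(xml);
        XmlNode weakNode = doc.DocumentElement;

        // Same reads as LoadWeakClassifier above.
        float threshold = float.Parse(weakNode.Attributes["Threshold"].Value);
        bool  posLarge  = bool.Parse(weakNode.Attributes["PosLargeThanThreshold"].Value);

        foreach (XmlNode node in weakNode.ChildNodes)
        {
            string[] nums = node.InnerText.Split(' ');   // "x y width height weight"
            Console.WriteLine("rect=({0},{1},{2},{3}) weight={4}",
                              nums[0], nums[1], nums[2], nums[3], nums[4]);
        }
        Console.WriteLine("threshold={0}, posLargeThanThreshold={1}", threshold, posLarge);
    }
}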
Example #6
 private void AddRect(int x1, int y1, int w1, int h1, int ww1, int x2, int y2, int w2, int h2, int ww2)
 {
     Rectangle[] rects = new Rectangle[] {
         new Rectangle(x1, y1, w1, h1),
         new Rectangle(x2, y2, w2, h2),
     };
     int[] weights = new int[] { ww1, ww2 };
     if (_gray)
     {
         HaarFeature gray = new HaarFeature(_weakId, rects, weights, ColorType.Gray);
         _weakClassifiers[_weakId] = new WeakClassifier(gray);
         _weakId++;
     }
     if (_saturation)
     {
         HaarFeature saturation = new HaarFeature(_weakId, rects, weights, ColorType.Saturation);
         _weakClassifiers[_weakId] = new WeakClassifier(saturation);
         _weakId++;
     }
 }
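Each rectangle pair plus its integer weights defines a two-rectangle Haar-like feature. Under the conventional definition, the feature value is the weighted sum of the pixel sums over the rectangles; below is a brute-force sketch of that evaluation on a plain pixel array. The real HaarFeature class above very likely uses an integral image instead, so this helper is purely illustrative.

using System;
using System.Drawing;

static class HaarFeatureDemo
{
    // Weighted sum of per-rectangle pixel sums (brute force, no integral image).
    static double Evaluate(byte[,] pixels, Rectangle[] rects, int[] weights)
    {
        double value = 0;
        for (int r = 0; r < rects.Length; r++)
        {
            double sum = 0;
            for (int y = rects[r].Top; y < rects[r].Bottom; y++)
                for (int x = rects[r].Left; x < rects[r].Right; x++)
                    sum += pixels[y, x];
            value += weights[r] * sum;
        }
        return value;
    }

    static void Main()
    {
        byte[,] img = new byte[24, 24];                       // hypothetical 24x24 sample window
        for (int y = 0; y < 24; y++)
            for (int x = 0; x < 24; x++)
                img[y, x] = (byte)(y < 12 ? 50 : 200);        // darker top half, brighter bottom half

        // Two stacked rectangles with weights -1 and +1: responds to a horizontal edge.
        Rectangle[] rects = { new Rectangle(4, 4, 16, 8), new Rectangle(4, 12, 16, 8) };
        int[] weights = { -1, 1 };

        Console.WriteLine(Evaluate(img, rects, weights));     // large positive response for this pattern
    }
}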
Example #7
        private void treeView1_AfterSelect(object sender, TreeViewEventArgs e)
        {
            if ((e.Node.Tag is WeakClassifier) == false)
            {
                return;
            }
            WeakClassifier weak = e.Node.Tag as WeakClassifier;

            textBox1.Text = weak.GetNumString();

            Image <Gray, Byte> imgBlank = _imgBlank.Copy();
            Image <Bgr, Byte>  imgPos   = _imgPos.Copy();
            HaarFeature        feature  = (HaarFeature)weak.Feature;

            for (int i = 0; i < feature._rects.Length; i++)
            {
                Image <Gray, Byte> mask = new Image <Gray, Byte>(imgBlank.Size);
                int       weight        = feature._weights[i];
                Rectangle rect          = feature._rects[i];
                rect.Height--;
                rect.Width--;
                mask.Draw(rect, new Gray(Math.Abs(weight)), 0);
                if (weight > 0)
                {
                    imgBlank = imgBlank.Add(mask);
                }
                else
                {
                    imgBlank = imgBlank.Sub(mask);
                }

                imgPos.Draw(rect, new Bgr(), 1);
            }
            Image <Gray, Byte> light = imgBlank.ThresholdBinary(new Gray(128), new Gray(255));
            Image <Gray, Byte> dark  = imgBlank.ThresholdBinaryInv(new Gray(127), new Gray(255));

            imgBlank.SetValue(new Gray(255), light);
            imgBlank.SetValue(new Gray(0), dark);
            imageBox1.Image = imgBlank;
            imageBox2.Image = imgPos;
        }
Example #8
        /// <summary>
        /// Runs one round of AdaBoost training and returns the best weak classifier.
        /// </summary>
        /// <param name="numPos">Number of positive samples.</param>
        /// <param name="numNeg">Number of negative samples.</param>
        /// <param name="sampleWeight">Sample weights, positive samples first followed by negative samples.</param>
        /// <returns>The weak classifier with the lowest weighted classification error.</returns>
        private WeakClassifier AdaBoost(int numPos, int numNeg, MyFloat[] sampleWeight)
        {
            int    count = numNeg + numPos;
            double minErr = double.MaxValue;
            int    minErrIndex = 0, i;

            WeakClassifier[] weakClassifiers = WeakClassifierManager.Instance.WeakClassifiers;
            int classifierCount = weakClassifiers.Length;

            for (int indexClassifier = 0; indexClassifier < classifierCount; indexClassifier++)
            {
                WeakClassifier classifier = weakClassifiers[indexClassifier];
                double         errCount   = 0;
                bool           result;
                for (i = 0; i < numPos; i++)
                {
                    //result=classifier.Predict(posSamples[i]);
                    result = classifier.Predict(i);
                    if (result == false)
                    {
                        errCount += sampleWeight[i];    // accumulate the weights of misclassified samples
                    }
                }
                for (; i < count; i++)
                {
                    //result = classifier.Predict(negSamples[i]);
                    result = classifier.Predict(i);
                    if (result == true)
                    {
                        errCount += sampleWeight[i];    // accumulate the weights of misclassified samples
                    }
                }
                if (errCount < minErr)  // track the weak classifier with the lowest weighted error
                {
                    minErr      = errCount;
                    minErrIndex = indexClassifier;
                }
            }

            //DebugMsg.AddMessage(minErr.ToString(), 0);
            bool[]         classifyResult = new bool[count];
            WeakClassifier bestClassifier = (WeakClassifier)weakClassifiers[minErrIndex].Clone();
            {
                bool result;
                for (i = 0; i < numPos; i++)
                {
                    result            = bestClassifier.Predict(i);
                    classifyResult[i] = result;
                }
                for (; i < count; i++)
                {
                    result            = bestClassifier.Predict(i);
                    classifyResult[i] = !result;
                }
            }

            // Adjust the sample weights. The clamping below follows the reference logRatio helper:
            // double logRatio( double val )
            // {
            //     const double eps = 1e-5;
            //
            //     val = max( val, eps );
            //     val = min( val, 1. - eps );
            //     return log( val/(1. - val) );
            // }
            const double eps = (double)1e-5;
            double       val = minErr;

            val    = Math.Max(val, eps);
            val    = Math.Min(val, 1 - eps);
            minErr = val;
            MyFloat factor = (MyFloat)(minErr / (1 - minErr));

            for (int indexSample = 0; indexSample < count; indexSample++)
            {
                if (classifyResult[indexSample])
                {
                    sampleWeight[indexSample] *= factor;
                }
            }
            //WeakClassifier bestClassifier= (WeakClassifier)weakClassifiers[minErrIndex].Clone();
            //bestClassifier.Weight =(MyFloat) Math.Log10(1 / factor);  // set the weak classifier weight α
            bestClassifier.Weight = -(MyFloat)Math.Log10(factor);  // set the weak classifier weight α
            return(bestClassifier);
        }
Example #9
        /// <summary>
        /// Trains the strong classifier for one stage of the cascade classifier.
        /// </summary>
        /// <param name="posSamples">Positive sample set.</param>
        /// <param name="negSamples">Negative sample set.</param>
        /// <param name="validateSamples">Validation sample set used to evaluate hit and false positive rates.</param>
        /// <param name="maxFalsePositiveRate">Maximum acceptable false positive rate for this stage.</param>
        /// <param name="minHitRate">Minimum required hit (detection) rate for this stage.</param>
        /// <returns>Statistics of the trained stage.</returns>
        public PredictResult Train(SampleCollection posSamples,
                                   SampleCollection negSamples,
                                   SampleCollection validateSamples,
                                   double maxFalsePositiveRate,
                                   double minHitRate)
        {
            List <WeakClassifier> weakClassifiers = new List <WeakClassifier>(10);
            PredictResult         result          = new PredictResult();

            MyFloat[]             sampleWeight = InitWeight(posSamples.Count, negSamples.Count);
            WeakClassifierManager allWeak      = WeakClassifierManager.Instance;
            Stopwatch             watch        = new Stopwatch();

            watch.Start();
            allWeak.PreCalcFeatureValue(posSamples, negSamples);
            watch.Stop();
            if (DebugMsg.Debug)
            {
                string msg = string.Format("所有弱分类器特征值预计算完成,用时:{0}\r\n",
                                           watch.Elapsed.ToString());
                DebugMsg.AddMessage(msg, 0);
            }

            int trainTime = 0;

            do
            {
                if (++trainTime != 1)
                {
                    NormalizeWeight(sampleWeight);
                }

                if (DebugMsg.Debug)
                {
                    string msg = string.Format("开始训练第{0}个弱分类器\r\n",
                                               trainTime);
                    DebugMsg.AddMessage(msg, 0);
                }
                watch.Reset();
                watch.Start();


                allWeak.Train(posSamples, negSamples, sampleWeight);
                WeakClassifier newBestClassifier = AdaBoost(posSamples.Count, negSamples.Count, sampleWeight);
                //UpdateWeights(newBestClassifier, posSamples.Count, negSamples.Count, sampleWeight);
                weakClassifiers.Add(newBestClassifier);
                _classifiers = weakClassifiers.ToArray();

                result = EvaluateErrorRate(validateSamples, minHitRate, maxFalsePositiveRate);
                watch.Stop();

                if (DebugMsg.Debug)
                {
                    string msg = string.Format("训练完成,花费时间{0}\r\n检测率:\t{1:P5}\t误检率:\t{2:P5}\r\n",
                                               watch.Elapsed.ToString(),
                                               result.HitRate,
                                               result.FalsePositiveRate);
                    DebugMsg.AddMessage(msg, 1);
                }
            } while (result.FalsePositiveRate > maxFalsePositiveRate);

            allWeak.ReleaseTrainData();
            foreach (WeakClassifier weak in _classifiers)
            {
                weak.ReleaseTrainData();
            }
            return(result);
        }
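InitWeight and NormalizeWeight are not shown here. In the standard AdaBoost setup the weights start uniform and are rescaled to sum to 1 after each round; the sketch below assumes that convention, and the actual implementations in this project may well differ.

using System;

static class SampleWeightDemo
{
    // Uniform initial weights over all samples (one common convention; assumption only).
    static double[] InitWeight(int numPos, int numNeg)
    {
        int count = numPos + numNeg;
        double[] w = new double[count];
        for (int i = 0; i < count; i++)
        {
            w[i] = 1.0 / count;
        }
        return w;
    }

    // Rescale the weights so they sum to 1 again after a boosting round.
    static void NormalizeWeight(double[] w)
    {
        double sum = 0;
        for (int i = 0; i < w.Length; i++) sum += w[i];
        for (int i = 0; i < w.Length; i++) w[i] /= sum;
    }

    static void Main()
    {
        double[] w = InitWeight(3, 2);
        w[4] *= 0.25;               // pretend one sample was down-weighted by a boosting round
        NormalizeWeight(w);
        Console.WriteLine(string.Join(", ", w));
    }
}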