Example #1
        private void UpdateWeights(WeakClassifier bestClassifier, int numPos, int numNeg, MyFloat[] sampleWeight)
        {
            int    count = numNeg + numPos;
            double err   = 0;
            int    i;

            // classifyResult[i] is true when sample i is classified correctly
            bool[] classifyResult = new bool[count];
            bool   result;

            for (i = 0; i < numPos; i++)
            {
                result = bestClassifier.Predict(i);
                if (result == false)
                {
                    err += sampleWeight[i];    // accumulate the weight of a misclassified positive sample
                }
                classifyResult[i] = result;
            }
            for (i = numPos; i < count; i++)
            {
                result = bestClassifier.Predict(i);
                if (result == true)
                {
                    err += sampleWeight[i];    // accumulate the weight of a misclassified negative sample
                }
                classifyResult[i] = !result;
            }

            // Adjust the sample weights using a clamped log-ratio, following this reference:
            //     logRatio( double val )
            //     {
            //         const double eps = 1e-5;
            //         val = max( val, eps );
            //         val = min( val, 1. - eps );
            //         return log( val/(1. - val) );
            //     }
            const double eps = 1e-5;

            err = Math.Max(err, eps);
            err = Math.Min(err, 1 - eps);
            MyFloat factor = (MyFloat)(err / (1 - err));

            // Correctly classified samples get their weight scaled down by factor (< 1 when err < 0.5)
            for (int indexSample = 0; indexSample < count; indexSample++)
            {
                if (classifyResult[indexSample])
                {
                    sampleWeight[indexSample] *= factor;
                }
            }
            bestClassifier.Weight = -(MyFloat)Math.Log10(factor);  // set the weak classifier weight α
        }
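
The commented-out reference inside UpdateWeights describes a clamped log-ratio. As a minimal sketch (the LogRatio name is an assumption, not part of the original code), the same clamping could be factored into a standalone helper; it uses the natural logarithm as in the reference, while UpdateWeights uses Math.Log10, which only rescales every classifier weight by the same constant factor:

        // Sketch of a helper matching the commented-out logRatio reference above.
        // Assumes the same 1e-5 clamp as used in UpdateWeights.
        private static double LogRatio(double val)
        {
            const double eps = 1e-5;
            val = Math.Max(val, eps);
            val = Math.Min(val, 1.0 - eps);
            return Math.Log(val / (1.0 - val));   // natural log, unlike Math.Log10 above
        }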
Example #2
        /// <summary>
        /// Runs one round of AdaBoost training and returns the best weak classifier.
        /// </summary>
        /// <param name="numPos">Number of positive samples</param>
        /// <param name="numNeg">Number of negative samples</param>
        /// <param name="sampleWeight">Sample weights, positive-sample weights first, followed by negative-sample weights</param>
        /// <returns>The weak classifier with the lowest weighted classification error</returns>
        private WeakClassifier AdaBoost(int numPos, int numNeg, MyFloat[] sampleWeight)
        {
            int    count = numNeg + numPos;
            double minErr = double.MaxValue;
            int    minErrIndex = 0, i;

            WeakClassifier[] weakClassifiers = WeakClassifierManager.Instance.WeakClassifiers;
            int classifierCount = weakClassifiers.Length;

            for (int indexClassifier = 0; indexClassifier < classifierCount; indexClassifier++)
            {
                WeakClassifier classifier = weakClassifiers[indexClassifier];
                double         errCount   = 0;
                bool           result;
                for (i = 0; i < numPos; i++)
                {
                    result = classifier.Predict(i);
                    if (result == false)
                    {
                        errCount += sampleWeight[i];    // accumulate the weight of a misclassified positive sample
                    }
                }
                for (; i < count; i++)
                {
                    result = classifier.Predict(i);
                    if (result == true)
                    {
                        errCount += sampleWeight[i];    // accumulate the weight of a misclassified negative sample
                    }
                }
                if (errCount < minErr)  // record the best weak classifier so far
                {
                    minErr      = errCount;
                    minErrIndex = indexClassifier;
                }
            }

            //DebugMsg.AddMessage(minErr.ToString(), 0);
            // classifyResult[i] is true when sample i is classified correctly
            bool[]         classifyResult = new bool[count];
            WeakClassifier bestClassifier = (WeakClassifier)weakClassifiers[minErrIndex].Clone();
            {
                bool result;
                for (i = 0; i < numPos; i++)
                {
                    result            = bestClassifier.Predict(i);
                    classifyResult[i] = result;
                }
                for (; i < count; i++)
                {
                    result            = bestClassifier.Predict(i);
                    classifyResult[i] = !result;
                }
            }

            // Adjust the sample weights using a clamped log-ratio, following this reference:
            //     logRatio( double val )
            //     {
            //         const double eps = 1e-5;
            //         val = max( val, eps );
            //         val = min( val, 1. - eps );
            //         return log( val/(1. - val) );
            //     }
            const double eps = 1e-5;

            minErr = Math.Max(minErr, eps);
            minErr = Math.Min(minErr, 1 - eps);
            MyFloat factor = (MyFloat)(minErr / (1 - minErr));

            // Correctly classified samples get their weight scaled down by factor (< 1 when minErr < 0.5)
            for (int indexSample = 0; indexSample < count; indexSample++)
            {
                if (classifyResult[indexSample])
                {
                    sampleWeight[indexSample] *= factor;
                }
            }
            bestClassifier.Weight = -(MyFloat)Math.Log10(factor);  // set the weak classifier weight α
            return bestClassifier;
        }
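
A caller would typically invoke AdaBoost once per boosting round and renormalize the sample weights between rounds so they keep forming a distribution. The sketch below is illustrative only: the Train name, the rounds parameter, and the List<WeakClassifier> result (from System.Collections.Generic) are assumptions, and MyFloat is assumed to behave like a floating-point alias.

        // Hypothetical outer loop (sketch): collect one weak classifier per round
        // and renormalize the sample weights after each round.
        private List<WeakClassifier> Train(int numPos, int numNeg, MyFloat[] sampleWeight, int rounds)
        {
            var strongClassifier = new List<WeakClassifier>();
            int count = numPos + numNeg;

            for (int round = 0; round < rounds; round++)
            {
                WeakClassifier best = AdaBoost(numPos, numNeg, sampleWeight);  // also rescales sampleWeight
                strongClassifier.Add(best);

                // Renormalize so the weights sum to 1 again.
                MyFloat sum = 0;
                for (int i = 0; i < count; i++)
                {
                    sum += sampleWeight[i];
                }
                for (int i = 0; i < count; i++)
                {
                    sampleWeight[i] /= sum;
                }
            }
            return strongClassifier;
        }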