示例#1
0
        /// <summary>
        /// Swaps the quadrants of the image so that the origin (the DC component
        /// of a DFT) moves to the image center.
        /// srcArr and dstArr must have the same size and type; they may be the
        /// same array, in which case the shift is performed in place.
        /// </summary>
        /// <param name="srcArr">Source array.</param>
        /// <param name="dstArr">Destination array (may be identical to srcArr).</param>
        private static void ShiftDFT(CvArr srcArr, CvArr dstArr)
        {
            CvSize size    = Cv.GetSize(srcArr);
            CvSize dstSize = Cv.GetSize(dstArr);

            if (dstSize.Width != size.Width || dstSize.Height != size.Height)
            {
                throw new ApplicationException("Source and Destination arrays must have equal sizes");
            }

            // Temporary buffer, needed only for the in-place mode.
            CvMat tmp = null;
            if (srcArr == dstArr)
            {
                tmp = Cv.CreateMat(size.Height / 2, size.Width / 2, Cv.GetElemType(srcArr));
            }

            int cx = size.Width / 2;   /* image center */
            int cy = size.Height / 2;

            // Views of the four source quadrants and the corresponding
            // destination quadrants.
            CvMat q1stub, q2stub;
            CvMat q3stub, q4stub;
            CvMat d1stub, d2stub;
            CvMat d3stub, d4stub;
            CvMat q1 = Cv.GetSubRect(srcArr, out q1stub, new CvRect(0, 0, cx, cy));
            CvMat q2 = Cv.GetSubRect(srcArr, out q2stub, new CvRect(cx, 0, cx, cy));
            CvMat q3 = Cv.GetSubRect(srcArr, out q3stub, new CvRect(cx, cy, cx, cy));
            CvMat q4 = Cv.GetSubRect(srcArr, out q4stub, new CvRect(0, cy, cx, cy));
            // BUG FIX: the destination views must be taken from dstArr. The
            // original code took them from srcArr, so the out-of-place branch
            // shuffled the source and never wrote to the destination.
            CvMat d1 = Cv.GetSubRect(dstArr, out d1stub, new CvRect(0, 0, cx, cy));
            CvMat d2 = Cv.GetSubRect(dstArr, out d2stub, new CvRect(cx, 0, cx, cy));
            CvMat d3 = Cv.GetSubRect(dstArr, out d3stub, new CvRect(cx, cy, cx, cy));
            CvMat d4 = Cv.GetSubRect(dstArr, out d4stub, new CvRect(0, cy, cx, cy));

            try
            {
                if (srcArr != dstArr)
                {
                    // Out-of-place: copy each quadrant to the diagonally opposite one.
                    if (!Cv.ARE_TYPES_EQ(q1, d1))
                    {
                        throw new ApplicationException("Source and Destination arrays must have the same format");
                    }
                    Cv.Copy(q3, d1, null);
                    Cv.Copy(q4, d2, null);
                    Cv.Copy(q1, d3, null);
                    Cv.Copy(q2, d4, null);
                }
                else
                {
                    // In-place: swap the diagonal quadrant pairs via the temp buffer.
                    Cv.Copy(q3, tmp, null);
                    Cv.Copy(q1, q3, null);
                    Cv.Copy(tmp, q1, null);
                    Cv.Copy(q4, tmp, null);
                    Cv.Copy(q2, q4, null);
                    Cv.Copy(tmp, q2, null);
                }
            }
            finally
            {
                // Release the temp buffer even if one of the copies throws
                // (the original leaked it on that path).
                if (tmp != null)
                {
                    tmp.Dispose();
                }
            }
        }
示例#2
0
 /// <summary>
 /// Disposes the matrix if it has been allocated and clears the reference.
 /// </summary>
 /// <param name="obj">Matrix reference; set to null after disposal. May already be null.</param>
 public static void DisposeToNull(ref CvMat obj)
 {
     if (obj == null)
     {
         return;
     }
     obj.Dispose();
     obj = null;
 }
示例#3
0
        /// <summary>
        /// Builds and trains a decision tree on the mushroom data set, then
        /// prints hit-rate statistics over the training database.
        /// </summary>
        /// <param name="data">Feature matrix, one sample per row.</param>
        /// <param name="missing">Missing-data mask with the same layout as data.</param>
        /// <param name="responses">Per-sample response; holds ASCII letter codes ('p' = poisonous) stored as float.</param>
        /// <param name="pWeight">Prior weight of the poisonous class relative to the edible class.</param>
        /// <returns>The trained decision tree.</returns>
        private CvDTree MushroomCreateDTree(CvMat data, CvMat missing, CvMat responses, float pWeight)
        {
            float[] priors = { 1, pWeight };

            CvDTree dtree = new CvDTree();

            // Dispose varType even if training throws (the original leaked it
            // on the exception path and disposed it manually otherwise).
            using (CvMat varType = new CvMat(data.Cols + 1, 1, MatrixType.U8C1))
            {
                // All the variables (and the response) are categorical.
                Cv.Set(varType, CvScalar.ScalarAll(CvStatModel.CV_VAR_CATEGORICAL));

                CvDTreeParams p = new CvDTreeParams(
                    8,     // max depth
                    10,    // min sample count
                    0,     // regression accuracy: N/A here
                    true,  // compute surrogate splits, as we have missing data
                    15,    // max number of categories (use sub-optimal algorithm for larger numbers)
                    10,    // number of cross-validation folds
                    true,  // use 1SE rule => smaller tree
                    true,  // throw away the pruned tree branches
                    priors // priors: the bigger pWeight, the more attention to the
                           // poisonous mushrooms (a mushroom will be judged poisonous
                           // with bigger chance)
                    );

                dtree.Train(data, DTreeDataLayout.RowSample, responses, null, null, varType, missing, p);
            }

            // Compute hit-rate on the training database; demonstrates Predict usage.
            int hr1 = 0, hr2 = 0, pTotal = 0;
            for (int i = 0; i < data.Rows; i++)
            {
                CvMat sample, mask;
                Cv.GetRow(data, out sample, i);
                Cv.GetRow(missing, out mask, i);
                double r = dtree.Predict(sample, mask).Value;
                bool misPredicted = Math.Abs(r - responses.DataArraySingle[i]) >= float.Epsilon;
                if (misPredicted)
                {
                    if (r != 'p')
                    {
                        hr1++; // poisonous mushroom predicted as edible
                    }
                    else
                    {
                        hr2++; // edible mushroom predicted as poisonous (false alarm)
                    }
                }
                pTotal += (responses.DataArraySingle[i] == (float)'p') ? 1 : 0;
            }

            Console.WriteLine("Results on the training database");
            Console.WriteLine("\tPoisonous mushrooms mis-predicted: {0} ({1}%)", hr1, (double)hr1 * 100 / pTotal);
            Console.WriteLine("\tFalse-alarms: {0} ({1}%)", hr2, (double)hr2 * 100 / (data.Rows - pTotal));

            return dtree;
        }
示例#4
0
 /// <summary>
 /// (Re)allocates the matrix when it is missing or its format differs from
 /// the requested one; an already-matching buffer is left untouched.
 /// </summary>
 /// <param name="dest">Matrix reference to initialize.</param>
 /// <param name="rows">Required number of rows.</param>
 /// <param name="cols">Required number of columns.</param>
 /// <param name="type">Required element type.</param>
 public static void InitCvMat(ref CvMat dest, int rows, int cols, MatrixType type)
 {
     bool reusable = dest != null
                     && dest.Rows == rows
                     && dest.Cols == cols
                     && dest.ElemType == type;
     if (reusable)
     {
         return;
     }
     if (dest != null)
     {
         dest.Dispose();
     }
     dest = new CvMat(rows, cols, type);
 }
示例#5
0
 /// <summary>
 /// Disposes every allocated working matrix and resets its reference to null,
 /// returning the object to its unallocated state.
 /// </summary>
 public void Clear()
 {
     if (Mask != null)      { Mask.Dispose();      Mask = null; }
     if (PrevParam != null) { PrevParam.Dispose(); PrevParam = null; }
     if (Param != null)     { Param.Dispose();     Param = null; }
     if (J != null)         { J.Dispose();         J = null; }
     if (Err != null)       { Err.Dispose();       Err = null; }
     if (JtJ != null)       { JtJ.Dispose();       JtJ = null; }
     if (JtJN != null)      { JtJN.Dispose();      JtJN = null; }
     if (JtErr != null)     { JtErr.Dispose();     JtErr = null; }
     if (JtJV != null)      { JtJV.Dispose();      JtJV = null; }
     if (JtJW != null)      { JtJW.Dispose();      JtJW = null; }
 }
示例#6
0
        /// <summary>
        /// Trains a decision-tree classifier on the mushroom data set and
        /// reports mis-prediction counts over the training database.
        /// </summary>
        /// <param name="data">Feature matrix, one sample per row.</param>
        /// <param name="missing">Missing-data mask matching data.</param>
        /// <param name="responses">Per-sample response; ASCII letter codes ('p' = poisonous) stored as float.</param>
        /// <param name="pWeight">Prior weight of the poisonous class.</param>
        /// <returns>The trained decision tree.</returns>
        private CvDTree MushroomCreateDTree(CvMat data, CvMat missing, CvMat responses, float pWeight)
        {
            float[] priors = { 1, pWeight };

            CvDTree dtree = new CvDTree();

            // Keep varType in a using block so it is released even when
            // training throws (the original leaked it on that path).
            using (CvMat varType = new CvMat(data.Cols + 1, 1, MatrixType.U8C1))
            {
                // All the variables (and the response) are categorical.
                Cv.Set(varType, CvScalar.ScalarAll(CvStatModel.CV_VAR_CATEGORICAL));

                CvDTreeParams p = new CvDTreeParams(
                    8,     // max depth
                    10,    // min sample count
                    0,     // regression accuracy: N/A here
                    true,  // compute surrogate splits, as we have missing data
                    15,    // max number of categories (use sub-optimal algorithm for larger numbers)
                    10,    // number of cross-validation folds
                    true,  // use 1SE rule => smaller tree
                    true,  // throw away the pruned tree branches
                    priors // priors: the bigger pWeight, the more attention to the
                           // poisonous mushrooms (a mushroom will be judged poisonous
                           // with bigger chance)
                    );

                dtree.Train(data, DTreeDataLayout.RowSample, responses, null, null, varType, missing, p);
            }

            // Compute hit-rate on the training database; demonstrates Predict usage.
            int hr1 = 0, hr2 = 0, pTotal = 0;
            for (int i = 0; i < data.Rows; i++)
            {
                CvMat sample, mask;
                Cv.GetRow(data, out sample, i);
                Cv.GetRow(missing, out mask, i);
                double r = dtree.Predict(sample, mask).Value;
                bool misPredicted = Math.Abs(r - responses.DataArraySingle[i]) >= float.Epsilon;
                if (misPredicted)
                {
                    if (r != 'p')
                    {
                        hr1++; // poisonous predicted as edible
                    }
                    else
                    {
                        hr2++; // edible predicted as poisonous (false alarm)
                    }
                }
                pTotal += (responses.DataArraySingle[i] == (float)'p') ? 1 : 0;
            }

            Console.WriteLine("Results on the training database");
            Console.WriteLine("\tPoisonous mushrooms mis-predicted: {0} ({1}%)", hr1, (double)hr1 * 100 / pTotal);
            Console.WriteLine("\tFalse-alarms: {0} ({1}%)", hr2, (double)hr2 * 100 / (data.Rows - pTotal));

            return dtree;
        }
示例#7
0
        /// <summary>
        /// Builds (or loads) an SVM classifier for the letter-recognition data set
        /// and prints recognition rates on the train/test splits.
        /// </summary>
        /// <param name="dataFilename">Database file to read samples and responses from.</param>
        /// <param name="filenameToSave">If non-null, path to save the trained classifier to.</param>
        /// <param name="filenameToLoad">If non-null, path to load a previously trained classifier from (skips training).</param>
        private void BuildSvmClassifier(string dataFilename, string filenameToSave, string filenameToLoad)
        {
            // Parameter C for C_SVC (misclassification penalty).
            const float SvmC = 1000;
            // Parameter gamma for the RBF kernel.
            const float SvmGamma = 0.1f;

            CvMat data      = null;
            CvMat responses = null;
            CvMat sampleIdx = null;

            int    nsamplesAll = 0, ntrainSamples = 0;
            double trainHr = 0, testHr = 0;

            CvSVM          svm      = new CvSVM();
            CvTermCriteria criteria = new CvTermCriteria(100, 0.001);

            try
            {
                ReadNumClassData(dataFilename, 16, out data, out responses);
            }
            catch
            {
                Console.WriteLine("Could not read the database {0}", dataFilename);
                return;
            }
            Console.WriteLine("The database {0} is loaded.", dataFilename);

            nsamplesAll   = data.Rows;
            // NOTE(review): only 20% of the samples are used for training here,
            // whereas the other classifiers in this file use 50-80% — confirm intended.
            ntrainSamples = (int)(nsamplesAll * 0.2);

            // Create or load the SVM classifier
            if (filenameToLoad != null)
            {
                // load classifier from the specified file
                svm.Load(filenameToLoad);
                ntrainSamples = 0;
                if (svm.GetSupportVectorCount() == 0)
                {
                    Console.WriteLine("Could not read the classifier {0}", filenameToLoad);
                    return;
                }
                Console.WriteLine("The classifier {0} is loaded.", filenameToLoad);
            }
            else
            {
                // create classifier by using <data> and <responses>
                Console.Write("Training the classifier ...");

                // 2. create sample_idx: 1 marks a training sample, 0 a test sample
                sampleIdx = new CvMat(1, nsamplesAll, MatrixType.U8C1);
                {
                    CvMat mat;
                    Cv.GetCols(sampleIdx, out mat, 0, ntrainSamples);
                    mat.Set(CvScalar.RealScalar(1));

                    Cv.GetCols(sampleIdx, out mat, ntrainSamples, nsamplesAll);
                    mat.SetZero();
                }

                // 3. train classifier
                // Parameters not used by the chosen method/kernel may be 0,
                // and the class weights may be null.
                svm.Train(data, responses, null, sampleIdx, new CvSVMParams(CvSVM.C_SVC, CvSVM.RBF, 0, SvmGamma, 0, SvmC, 0, 0, null, criteria));
                Console.WriteLine();
            }


            // compute prediction error on train and test data
            for (int i = 0; i < nsamplesAll; i++)
            {
                double r;
                CvMat  sample;
                Cv.GetRow(data, out sample, i);

                r = svm.Predict(sample);
                // compare results: responses hold ASCII letter codes stored as float
                Console.WriteLine(
                    "predict: {0}, responses: {1}, {2}",
                    (char)r,
                    (char)responses.DataArraySingle[i],
                    Math.Abs((double)r - responses.DataArraySingle[i]) <= float.Epsilon ? "Good!" : "Bad!"
                    );
                // r becomes 1 for a hit, 0 for a miss
                r = Math.Abs((double)r - responses.DataArraySingle[i]) <= float.Epsilon ? 1 : 0;

                if (i < ntrainSamples)
                {
                    trainHr += r;
                }
                else
                {
                    testHr += r;
                }
            }

            testHr  /= (double)(nsamplesAll - ntrainSamples);
            trainHr /= (double)ntrainSamples;
            Console.WriteLine("Gamma={0:F5}, C={1:F5}", SvmGamma, SvmC);
            if (filenameToLoad != null)
            {
                // when loaded, every sample is a test sample (ntrainSamples == 0)
                Console.WriteLine("Recognition rate: test = {0:F1}%", testHr * 100.0);
            }
            else
            {
                Console.WriteLine("Recognition rate: train = {0:F1}%, test = {1:F1}%", trainHr * 100.0, testHr * 100.0);
            }

            Console.WriteLine("Number of Support Vector: {0}", svm.GetSupportVectorCount());
            // Save SVM classifier to file if needed
            if (filenameToSave != null)
            {
                svm.Save(filenameToSave);
            }


            Console.Read();


            if (sampleIdx != null)
            {
                sampleIdx.Dispose();
            }
            data.Dispose();
            responses.Dispose();
            svm.Dispose();
        }
示例#8
0
        /// <summary>
        /// Builds (or loads) a multilayer-perceptron classifier for the
        /// letter-recognition data set and prints recognition rates.
        /// </summary>
        /// <param name="dataFilename">Database file to read samples and responses from.</param>
        /// <param name="filenameToSave">If non-null, path to save the trained classifier to.</param>
        /// <param name="filenameToLoad">If non-null, path to load a previously trained classifier from (skips training).</param>
        private void BuildMlpClassifier(string dataFilename, string filenameToSave, string filenameToLoad)
        {
            // Number of output classes: letters 'A'..'Z'.
            const int ClassCount = 26;

            CvMat data        = null;
            CvMat trainData   = null;
            CvMat responses   = null;
            CvMat mlpResponse = null;
            CvMat layerSizes  = null;

            int       nsamplesAll = 0, ntrainSamples = 0;
            double    trainHr = 0, testHr = 0;
            CvANN_MLP mlp = new CvANN_MLP();

            try
            {
                ReadNumClassData(dataFilename, 16, out data, out responses);
            }
            catch
            {
                Console.WriteLine("Could not read the database {0}", dataFilename);
                return;
            }
            Console.WriteLine("The database {0} is loaded.", dataFilename);

            nsamplesAll   = data.Rows;
            ntrainSamples = (int)(nsamplesAll * 0.8);

            // Create or load MLP classifier
            if (filenameToLoad != null)
            {
                // load classifier from the specified file
                mlp.Load(filenameToLoad);
                ntrainSamples = 0;
                if (mlp.GetLayerCount() == 0)
                {
                    Console.WriteLine("Could not read the classifier {0}", filenameToLoad);
                    return;
                }
                Console.WriteLine("The classifier {0} is loaded.", filenameToLoad);
            }
            else
            {
                // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                //
                // MLP does not support categorical variables explicitly.
                // So, instead of the output class label, we will use
                // a binary vector of <class_count> components for training and,
                // therefore, MLP will give us a vector of "probabilities" at the
                // prediction stage
                //
                // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

                using (CvMat newResponses = new CvMat(ntrainSamples, ClassCount, MatrixType.F32C1))
                {
                    // 1. unroll the responses: one-hot encode each class label
                    Console.WriteLine("Unrolling the responses...");
                    unsafe
                    {
                        for (int i = 0; i < ntrainSamples; i++)
                        {
                            // class index 0..25 from the ASCII letter stored as float
                            int    clsLabel = Cv.Round(responses.DataArraySingle[i]) - 'A';
                            float *bitVec   = (float *)(newResponses.DataByte + i * newResponses.Step);
                            for (int j = 0; j < ClassCount; j++)
                            {
                                bitVec[j] = 0.0f;
                            }
                            bitVec[clsLabel] = 1.0f;
                        }
                    }
                    Cv.GetRows(data, out trainData, 0, ntrainSamples);

                    // 2. train classifier
                    // network topology: inputs -> 100 -> 100 -> ClassCount outputs
                    int[] layerSizesData = { data.Cols, 100, 100, ClassCount };
                    layerSizes = new CvMat(1, layerSizesData.Length, MatrixType.S32C1, layerSizesData);
                    mlp.Create(layerSizes);
                    Console.Write("Training the classifier (may take a few minutes)...");
                    mlp.Train(
                        trainData, newResponses, null, null,
                        new CvANN_MLP_TrainParams(new CvTermCriteria(300, 0.01), MLPTrainingMethod.RPROP, 0.01)
                        );
                }
                Console.WriteLine();
            }

            mlpResponse = new CvMat(1, ClassCount, MatrixType.F32C1);

            // compute prediction error on train and test data
            for (int i = 0; i < nsamplesAll; i++)
            {
                int     bestClass;
                CvMat   sample;
                CvPoint minLoc, maxLoc;

                Cv.GetRow(data, out sample, i);
                mlp.Predict(sample, mlpResponse);
                // the predicted class is the output with the maximum "probability"
                mlpResponse.MinMaxLoc(out minLoc, out maxLoc, null);
                bestClass = maxLoc.X + 'A';

                int r = (Math.Abs((double)bestClass - responses.DataArraySingle[i]) < float.Epsilon) ? 1 : 0;

                if (i < ntrainSamples)
                {
                    trainHr += r;
                }
                else
                {
                    testHr += r;
                }
            }

            testHr  /= (double)(nsamplesAll - ntrainSamples);
            trainHr /= (double)ntrainSamples;
            Console.WriteLine("Recognition rate: train = {0:F1}%, test = {1:F1}%", trainHr * 100.0, testHr * 100.0);

            // Save classifier to file if needed
            if (filenameToSave != null)
            {
                mlp.Save(filenameToSave);
            }


            Console.Read();


            mlpResponse.Dispose();
            data.Dispose();
            responses.Dispose();
            if (layerSizes != null)
            {
                layerSizes.Dispose();
            }
            mlp.Dispose();
        }
示例#9
0
        /// <summary>
        /// Builds (or loads) a boosted-tree classifier for the letter-recognition
        /// data set and prints recognition rates.
        /// </summary>
        /// <param name="dataFilename">Database file to read samples and responses from.</param>
        /// <param name="filenameToSave">If non-null, path to save the trained classifier to.</param>
        /// <param name="filenameToLoad">If non-null, path to load a previously trained classifier from (skips training).</param>
        private void BuildBoostClassifier(string dataFilename, string filenameToSave, string filenameToLoad)
        {
            // Number of output classes: letters 'A'..'Z'.
            const int ClassCount = 26;

            CvMat data          = null;
            CvMat responses     = null;
            CvMat varType       = null;
            CvMat tempSample    = null;
            CvMat weakResponses = null;

            int     nsamplesAall = 0, ntrainSamples = 0;
            int     varCount;
            double  trainHr = 0, testHr = 0;
            CvBoost boost = new CvBoost();

            try
            {
                ReadNumClassData(dataFilename, 16, out data, out responses);
            }
            catch
            {
                Console.WriteLine("Could not read the database {0}", dataFilename);
                return;
            }
            Console.WriteLine("The database {0} is loaded.", dataFilename);

            nsamplesAall  = data.Rows;
            ntrainSamples = (int)(nsamplesAall * 0.5);
            varCount      = data.Cols;

            // Create or load Boosted Tree classifier
            if (filenameToLoad != null)
            {
                // load classifier from the specified file
                boost.Load(filenameToLoad);
                ntrainSamples = 0;
                if (boost.GetWeakPredictors() == null)
                {
                    Console.WriteLine("Could not read the classifier {0}", filenameToLoad);
                    return;
                }
                Console.WriteLine("The classifier {0} is loaded.", filenameToLoad);
            }
            else
            {
                // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                //
                // As currently boosted tree classifier in MLL can only be trained
                // for 2-class problems, we transform the training database by
                // "unrolling" each training sample as many times as the number of
                // classes (26) that we have.
                //
                // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

                using (CvMat newData = new CvMat(ntrainSamples * ClassCount, varCount + 1, MatrixType.F32C1))
                    using (CvMat newResponses = new CvMat(ntrainSamples * ClassCount, 1, MatrixType.S32C1))
                    {
                        // 1. unroll the database type mask:
                        // each sample is replicated once per class, with the class
                        // index appended as an extra feature and a binary response
                        // ("is this the true class?").
                        Console.WriteLine("Unrolling the database...");
                        for (int i = 0; i < ntrainSamples; i++)
                        {
                            unsafe
                            {
                                float *dataRow = (float *)(data.DataByte + data.Step * i);
                                for (int j = 0; j < ClassCount; j++)
                                {
                                    float *newDataRow = (float *)(newData.DataByte + newData.Step * (i * ClassCount + j));
                                    for (int k = 0; k < varCount; k++)
                                    {
                                        newDataRow[k] = dataRow[k];
                                    }
                                    newDataRow[varCount] = (float)j;
                                    newResponses.DataInt32[i * ClassCount + j] = (responses.DataSingle[i] == j + 'A') ? 1 : 0;
                                }
                            }
                        }

                        // 2. create type mask
                        varType = new CvMat(varCount + 2, 1, MatrixType.U8C1);
                        varType.Set(CvScalar.ScalarAll(CvStatModel.CV_VAR_ORDERED));
                        // the last indicator variable, as well
                        // as the new (binary) response are categorical
                        varType.SetReal1D(varCount, CvStatModel.CV_VAR_CATEGORICAL);
                        varType.SetReal1D(varCount + 1, CvStatModel.CV_VAR_CATEGORICAL);

                        // 3. train classifier
                        Console.Write("Training the classifier (may take a few minutes)...");
                        boost.Train(
                            newData, DTreeDataLayout.RowSample, newResponses, null, null, varType, null,
                            new CvBoostParams(CvBoost.REAL, 100, 0.95, 5, false, null)
                            );
                    }
                Console.WriteLine();
            }

            tempSample    = new CvMat(1, varCount + 1, MatrixType.F32C1);
            weakResponses = new CvMat(1, boost.GetWeakPredictors().Total, MatrixType.F32C1);

            // compute prediction error on train and test data
            for (int i = 0; i < nsamplesAall; i++)
            {
                int    bestClass = 0;
                double maxSum    = double.MinValue;
                double r;
                CvMat  sample;

                Cv.GetRow(data, out sample, i);
                for (int k = 0; k < varCount; k++)
                {
                    tempSample.DataArraySingle[k] = sample.DataArraySingle[k];
                }

                // try every candidate class and pick the one with the largest
                // sum of weak-classifier responses
                for (int j = 0; j < ClassCount; j++)
                {
                    tempSample.DataArraySingle[varCount] = (float)j;
                    boost.Predict(tempSample, null, weakResponses);
                    double sum = weakResponses.Sum().Val0;
                    if (maxSum < sum)
                    {
                        maxSum    = sum;
                        bestClass = j + 'A';
                    }
                }

                r = (Math.Abs(bestClass - responses.DataArraySingle[i]) < float.Epsilon) ? 1 : 0;

                if (i < ntrainSamples)
                {
                    trainHr += r;
                }
                else
                {
                    testHr += r;
                }
            }

            testHr  /= (double)(nsamplesAall - ntrainSamples);
            trainHr /= (double)ntrainSamples;
            Console.WriteLine("Recognition rate: train = {0:F1}%, test = {1:F1}%", trainHr * 100.0, testHr * 100.0);
            Console.WriteLine("Number of trees: {0}", boost.GetWeakPredictors().Total);

            // Save classifier to file if needed
            if (filenameToSave != null)
            {
                boost.Save(filenameToSave);
            }


            Console.Read();


            tempSample.Dispose();
            weakResponses.Dispose();
            if (varType != null)
            {
                varType.Dispose();
            }
            data.Dispose();
            responses.Dispose();
            boost.Dispose();
        }
示例#10
0
        /// <summary>
        /// Builds (or loads) a Random Trees classifier for the letter-recognition
        /// data set, prints recognition rates, variable importance, and sample
        /// proximities.
        /// </summary>
        /// <param name="dataFilename">Database file to read samples and responses from.</param>
        /// <param name="filenameToSave">If non-null, path to save the trained classifier to.</param>
        /// <param name="filenameToLoad">If non-null, path to load a previously trained classifier from (skips training).</param>
        private void BuildRtreesClassifier(string dataFilename, string filenameToSave, string filenameToLoad)
        {
            CvMat data      = null;
            CvMat responses = null;
            CvMat varType   = null;
            CvMat sampleIdx = null;


            int      nsamplesAll = 0, ntrainSamples = 0;
            double   trainHr = 0, testHr = 0;
            CvRTrees forest = new CvRTrees();

            try
            {
                ReadNumClassData(dataFilename, 16, out data, out responses);
            }
            catch
            {
                Console.WriteLine("Could not read the database {0}", dataFilename);
                return;
            }
            Console.WriteLine("The database {0} is loaded.", dataFilename);

            nsamplesAll   = data.Rows;
            ntrainSamples = (int)(nsamplesAll * 0.8);

            // Create or load Random Trees classifier
            if (filenameToLoad != null)
            {
                // load classifier from the specified file
                forest.Load(filenameToLoad);
                ntrainSamples = 0;
                if (forest.GetTreeCount() == 0)
                {
                    Console.WriteLine("Could not read the classifier {0}", filenameToLoad);
                    return;
                }
                Console.WriteLine("The classifier {0} is loaded.", filenameToLoad);
            }
            else
            {
                // create classifier by using <data> and <responses>
                Console.Write("Training the classifier ...");

                // 1. create type mask: all features ordered, response categorical
                varType = new CvMat(data.Cols + 1, 1, MatrixType.U8C1);
                varType.Set(CvScalar.ScalarAll(CvStatModel.CV_VAR_ORDERED));
                varType.SetReal1D(data.Cols, CvStatModel.CV_VAR_CATEGORICAL);

                // 2. create sample_idx: 1 marks a training sample, 0 a test sample
                sampleIdx = new CvMat(1, nsamplesAll, MatrixType.U8C1);
                {
                    CvMat mat;
                    Cv.GetCols(sampleIdx, out mat, 0, ntrainSamples);
                    mat.Set(CvScalar.RealScalar(1));

                    Cv.GetCols(sampleIdx, out mat, ntrainSamples, nsamplesAll);
                    mat.SetZero();
                }

                // 3. train classifier
                forest.Train(
                    data, DTreeDataLayout.RowSample, responses, null, sampleIdx, varType, null,
                    new CvRTParams(10, 10, 0, false, 15, null, true, 4, new CvTermCriteria(100, 0.01f))
                    );
                Console.WriteLine();
            }

            // compute prediction error on train and test data
            for (int i = 0; i < nsamplesAll; i++)
            {
                double r;
                CvMat  sample;
                Cv.GetRow(data, out sample, i);

                r = forest.Predict(sample);
                // r becomes 1 for a hit, 0 for a miss
                r = Math.Abs((double)r - responses.DataArraySingle[i]) <= float.Epsilon ? 1 : 0;

                if (i < ntrainSamples)
                {
                    trainHr += r;
                }
                else
                {
                    testHr += r;
                }
            }

            testHr  /= (double)(nsamplesAll - ntrainSamples);
            trainHr /= (double)ntrainSamples;
            Console.WriteLine("Recognition rate: train = {0:F1}%, test = {1:F1}%", trainHr * 100.0, testHr * 100.0);

            Console.WriteLine("Number of trees: {0}", forest.GetTreeCount());

            // Print variable importance
            Mat   varImportance0 = forest.GetVarImportance();
            CvMat varImportance  = varImportance0.ToCvMat();

            if (varImportance != null)
            {
                double rtImpSum = Cv.Sum(varImportance).Val0;
                Console.WriteLine("var#\timportance (in %):");
                for (int i = 0; i < varImportance.Cols; i++)
                {
                    Console.WriteLine("{0}\t{1:F1}", i, 100.0f * varImportance.DataArraySingle[i] / rtImpSum);
                }
            }

            // Print some proximitites
            Console.WriteLine("Proximities between some samples corresponding to the letter 'T':");
            {
                CvMat sample1, sample2;
                // row-index pairs; {-1,-1} terminates the list
                int[,] pairs = new int[, ] {
                    { 0, 103 }, { 0, 106 }, { 106, 103 }, { -1, -1 }
                };

                for (int i = 0; pairs[i, 0] >= 0; i++)
                {
                    Cv.GetRow(data, out sample1, pairs[i, 0]);
                    Cv.GetRow(data, out sample2, pairs[i, 1]);
                    Console.WriteLine("proximity({0},{1}) = {2:F1}%", pairs[i, 0], pairs[i, 1], forest.GetProximity(sample1, sample2) * 100.0);
                }
            }

            // Save Random Trees classifier to file if needed
            if (filenameToSave != null)
            {
                forest.Save(filenameToSave);
            }


            Console.Read();


            if (sampleIdx != null)
            {
                sampleIdx.Dispose();
            }
            if (varType != null)
            {
                varType.Dispose();
            }
            data.Dispose();
            responses.Dispose();
            forest.Dispose();
        }
示例#11
0
        /// <summary>
        /// Trains (or loads) a multi-layer perceptron classifier for the 26-class
        /// letter-recognition data set and prints the train/test recognition rates.
        /// </summary>
        /// <param name="dataFilename">Database file read via ReadNumClassData (16 features per sample).</param>
        /// <param name="filenameToSave">Path to save the trained classifier, or null to skip saving.</param>
        /// <param name="filenameToLoad">Path of a previously saved classifier to load, or null to train from scratch.</param>
        private void BuildMlpClassifier(string dataFilename, string filenameToSave, string filenameToLoad)
        {
            const int ClassCount = 26;

            CvMat data = null;
            CvMat trainData = null;
            CvMat responses = null;
            CvMat mlpResponse = null;
            CvMat layerSizes = null;

            int nsamplesAll = 0, ntrainSamples = 0;
            double trainHr = 0, testHr = 0;
            CvANN_MLP mlp = new CvANN_MLP();

            try
            {
                try
                {
                    ReadNumClassData(dataFilename, 16, out data, out responses);
                }
                catch
                {
                    Console.WriteLine("Could not read the database {0}", dataFilename);
                    return;
                }
                Console.WriteLine("The database {0} is loaded.", dataFilename);

                nsamplesAll = data.Rows;
                ntrainSamples = (int)(nsamplesAll * 0.8);

                // Create or load MLP classifier
                if (filenameToLoad != null)
                {
                    // Load classifier from the specified file; every sample is then a test sample.
                    mlp.Load(filenameToLoad);
                    ntrainSamples = 0;
                    if (mlp.GetLayerCount() == 0)
                    {
                        Console.WriteLine("Could not read the classifier {0}", filenameToLoad);
                        return;
                    }
                    Console.WriteLine("The classifier {0} is loaded.", filenameToLoad);
                }
                else
                {
                    // MLP does not support categorical variables explicitly.
                    // Instead of the output class label we train on a binary vector of
                    // <ClassCount> components (one-hot), so the MLP yields a vector of
                    // "probabilities" at the prediction stage.
                    using (CvMat newResponses = new CvMat(ntrainSamples, ClassCount, MatrixType.F32C1))
                    {
                        // 1. unroll the responses into one-hot rows
                        Console.WriteLine("Unrolling the responses...");
                        unsafe
                        {
                            for (int i = 0; i < ntrainSamples; i++)
                            {
                                // class labels are stored as 'A'..'Z' character codes
                                int clsLabel = Cv.Round(responses.DataArraySingle[i]) - 'A';
                                float* bitVec = (float*)(newResponses.DataByte + i * newResponses.Step);
                                for (int j = 0; j < ClassCount; j++)
                                {
                                    bitVec[j] = 0.0f;
                                }
                                bitVec[clsLabel] = 1.0f;
                            }
                        }
                        Cv.GetRows(data, out trainData, 0, ntrainSamples);

                        // 2. train classifier
                        int[] layerSizesData = { data.Cols, 100, 100, ClassCount };
                        layerSizes = new CvMat(1, layerSizesData.Length, MatrixType.S32C1, layerSizesData);
                        mlp.Create(layerSizes);
                        Console.Write("Training the classifier (may take a few minutes)...");
                        mlp.Train(
                            trainData, newResponses, null, null,
                            new CvANN_MLP_TrainParams(new CvTermCriteria(300, 0.01), MLPTrainingMethod.RPROP, 0.01)
                        );
                    }
                    Console.WriteLine();
                }

                mlpResponse = new CvMat(1, ClassCount, MatrixType.F32C1);

                // compute prediction error on train and test data
                for (int i = 0; i < nsamplesAll; i++)
                {
                    int bestClass;
                    CvMat sample;
                    CvPoint minLoc, maxLoc;

                    Cv.GetRow(data, out sample, i);
                    mlp.Predict(sample, mlpResponse);
                    // the predicted class is the component with the maximum response
                    mlpResponse.MinMaxLoc(out minLoc, out maxLoc, null);
                    bestClass = maxLoc.X + 'A';

                    int r = (Math.Abs((double)bestClass - responses.DataArraySingle[i]) < float.Epsilon) ? 1 : 0;

                    if (i < ntrainSamples)
                        trainHr += r;
                    else
                        testHr += r;
                }

                // Guard the divisors: a loaded classifier has ntrainSamples == 0,
                // which previously produced NaN in the printed train rate.
                int ntestSamples = nsamplesAll - ntrainSamples;
                testHr = (ntestSamples > 0) ? testHr / ntestSamples : 0.0;
                trainHr = (ntrainSamples > 0) ? trainHr / ntrainSamples : 0.0;
                Console.WriteLine("Recognition rate: train = {0:F1}%, test = {1:F1}%", trainHr * 100.0, testHr * 100.0);

                // Save classifier to file if needed
                if (filenameToSave != null)
                {
                    mlp.Save(filenameToSave);
                }

                Console.Read();
            }
            finally
            {
                // Dispose on every path, including the early returns above
                // (the original leaked data/responses/mlp when loading failed).
                if (mlpResponse != null) mlpResponse.Dispose();
                if (layerSizes != null) layerSizes.Dispose();
                if (data != null) data.Dispose();
                if (responses != null) responses.Dispose();
                mlp.Dispose();
            }
        }
示例#12
0
        /// <summary>
        /// Trains (or loads) a boosted-tree classifier for the 26-class
        /// letter-recognition data set and prints the train/test recognition rates.
        /// </summary>
        /// <param name="dataFilename">Database file read via ReadNumClassData (16 features per sample).</param>
        /// <param name="filenameToSave">Path to save the trained classifier, or null to skip saving.</param>
        /// <param name="filenameToLoad">Path of a previously saved classifier to load, or null to train from scratch.</param>
        private void BuildBoostClassifier(string dataFilename, string filenameToSave, string filenameToLoad)
        {
            const int ClassCount = 26;

            CvMat data = null;
            CvMat responses = null;
            CvMat varType = null;
            CvMat tempSample = null;
            CvMat weakResponses = null;

            int nsamplesAll = 0, ntrainSamples = 0;
            int varCount;
            double trainHr = 0, testHr = 0;
            CvBoost boost = new CvBoost();

            try
            {
                try
                {
                    ReadNumClassData(dataFilename, 16, out data, out responses);
                }
                catch
                {
                    Console.WriteLine("Could not read the database {0}", dataFilename);
                    return;
                }
                Console.WriteLine("The database {0} is loaded.", dataFilename);

                nsamplesAll = data.Rows;
                ntrainSamples = (int)(nsamplesAll * 0.5);
                varCount = data.Cols;

                // Create or load Boosted Tree classifier
                if (filenameToLoad != null)
                {
                    // Load classifier from the specified file; every sample is then a test sample.
                    boost.Load(filenameToLoad);
                    ntrainSamples = 0;
                    if (boost.GetWeakPredictors() == null)
                    {
                        Console.WriteLine("Could not read the classifier {0}", filenameToLoad);
                        return;
                    }
                    Console.WriteLine("The classifier {0} is loaded.", filenameToLoad);
                }
                else
                {
                    // The boosted tree classifier in MLL can only be trained on
                    // 2-class problems, so the training database is transformed by
                    // "unrolling" each training sample once per class (26): each copy
                    // carries the candidate class as an extra variable, and the new
                    // binary response says whether that candidate is the true class.
                    using (CvMat newData = new CvMat(ntrainSamples * ClassCount, varCount + 1, MatrixType.F32C1))
                    using (CvMat newResponses = new CvMat(ntrainSamples * ClassCount, 1, MatrixType.S32C1))
                    {
                        // 1. unroll the database type mask
                        Console.WriteLine("Unrolling the database...");
                        for (int i = 0; i < ntrainSamples; i++)
                        {
                            unsafe
                            {
                                float* dataRow = (float*)(data.DataByte + data.Step * i);
                                for (int j = 0; j < ClassCount; j++)
                                {
                                    float* newDataRow = (float*)(newData.DataByte + newData.Step * (i * ClassCount + j));
                                    for (int k = 0; k < varCount; k++)
                                    {
                                        newDataRow[k] = dataRow[k];
                                    }
                                    // the appended variable encodes the candidate class index
                                    newDataRow[varCount] = (float)j;
                                    newResponses.DataInt32[i * ClassCount + j] = (responses.DataSingle[i] == j + 'A') ? 1 : 0;
                                }
                            }
                        }

                        // 2. create type mask
                        varType = new CvMat(varCount + 2, 1, MatrixType.U8C1);
                        varType.Set(CvScalar.ScalarAll(CvStatModel.CV_VAR_ORDERED));
                        // the last indicator variable, as well
                        // as the new (binary) response are categorical
                        varType.SetReal1D(varCount, CvStatModel.CV_VAR_CATEGORICAL);
                        varType.SetReal1D(varCount + 1, CvStatModel.CV_VAR_CATEGORICAL);

                        // 3. train classifier
                        Console.Write("Training the classifier (may take a few minutes)...");
                        boost.Train(
                            newData, DTreeDataLayout.RowSample, newResponses, null, null, varType, null,
                            new CvBoostParams(CvBoost.REAL, 100, 0.95, 5, false, null)
                        );
                    }
                    Console.WriteLine();
                }

                tempSample = new CvMat(1, varCount + 1, MatrixType.F32C1);
                weakResponses = new CvMat(1, boost.GetWeakPredictors().Total, MatrixType.F32C1);

                // compute prediction error on train and test data
                for (int i = 0; i < nsamplesAll; i++)
                {
                    int bestClass = 0;
                    double maxSum = double.MinValue;
                    double r;
                    CvMat sample;

                    Cv.GetRow(data, out sample, i);
                    for (int k = 0; k < varCount; k++)
                    {
                        tempSample.DataArraySingle[k] = sample.DataArraySingle[k];
                    }

                    // try every candidate class; pick the one whose weak
                    // responses sum to the largest value
                    for (int j = 0; j < ClassCount; j++)
                    {
                        tempSample.DataArraySingle[varCount] = (float)j;
                        boost.Predict(tempSample, null, weakResponses);
                        double sum = weakResponses.Sum().Val0;
                        if (maxSum < sum)
                        {
                            maxSum = sum;
                            bestClass = j + 'A';
                        }
                    }

                    r = (Math.Abs(bestClass - responses.DataArraySingle[i]) < float.Epsilon) ? 1 : 0;

                    if (i < ntrainSamples)
                        trainHr += r;
                    else
                        testHr += r;
                }

                // Guard the divisors: a loaded classifier has ntrainSamples == 0,
                // which previously produced NaN in the printed train rate.
                int ntestSamples = nsamplesAll - ntrainSamples;
                testHr = (ntestSamples > 0) ? testHr / ntestSamples : 0.0;
                trainHr = (ntrainSamples > 0) ? trainHr / ntrainSamples : 0.0;
                Console.WriteLine("Recognition rate: train = {0:F1}%, test = {1:F1}%", trainHr * 100.0, testHr * 100.0);
                Console.WriteLine("Number of trees: {0}", boost.GetWeakPredictors().Total);

                // Save classifier to file if needed
                if (filenameToSave != null)
                {
                    boost.Save(filenameToSave);
                }

                Console.Read();
            }
            finally
            {
                // Dispose on every path, including the early returns above
                // (the original leaked data/responses/boost when loading failed).
                if (tempSample != null) tempSample.Dispose();
                if (weakResponses != null) weakResponses.Dispose();
                if (varType != null) varType.Dispose();
                if (data != null) data.Dispose();
                if (responses != null) responses.Dispose();
                boost.Dispose();
            }
        }
示例#13
0
 /// <summary>
 /// Disposes the matrix if one is allocated, then clears the reference to null.
 /// Safe to call with a reference that is already null.
 /// </summary>
 /// <param name="obj">Reference to the matrix to dispose and reset.</param>
 public static void DisposeToNull(ref CvMat obj)
 {
     if (obj == null)
     {
         return;
     }
     obj.Dispose();
     obj = null;
 }
示例#14
0
 /// <summary>
 /// Ensures the destination matrix exists with the requested shape and element type,
 /// allocating a fresh one (and disposing the old, if any) when it does not match.
 /// </summary>
 /// <param name="dest">Matrix reference to (re)allocate; left untouched when it already matches.</param>
 /// <param name="rows">Required number of rows.</param>
 /// <param name="cols">Required number of columns.</param>
 /// <param name="type">Required element type.</param>
 public static void InitCvMat(ref CvMat dest, int rows, int cols, MatrixType type)
 {
     bool alreadyMatches = dest != null
         && dest.Rows == rows
         && dest.Cols == cols
         && dest.ElemType == type;
     if (alreadyMatches)
     {
         return;
     }

     if (dest != null)
     {
         dest.Dispose();
     }
     dest = new CvMat(rows, cols, type);
 }
        /// <summary>
        /// Computes the homography mapping the four given source points onto the
        /// centered half-size rectangle of dstGrid, warps the raw image with it,
        /// and shows the result in imgTransformed.
        /// </summary>
        /// <param name="srcPoints">Eight values: four (x, y) corner points in source-image coordinates.</param>
        private void Transform(double[] srcPoints)
        {
            const int POINT_COUNT = 8;
            System.Diagnostics.Debug.Assert(srcPoints.Length == POINT_COUNT);

            // Destination quad: corners at 1/4 and 3/4 of dstGrid, clockwise from top-left.
            double[] dstPoints = new double[POINT_COUNT] {
                dstGrid.Width * 1 / 4, dstGrid.Height * 1 / 4, dstGrid.Width * 3 / 4, dstGrid.Height * 1 / 4,
                dstGrid.Width * 3 / 4, dstGrid.Height * 3 / 4, dstGrid.Width * 1 / 4, dstGrid.Height * 3 / 4 };

            // using-blocks dispose every native matrix on all paths; the original
            // leaked viewerHomographyMatrix and leaked everything on exceptions.
            using (CvMat srcPointsMat = new CvMat(4, 2, MatrixType.F64C1, srcPoints))
            using (CvMat dstPointsMat = new CvMat(4, 2, MatrixType.F64C1, dstPoints))
            using (CvMat viewerHomographyMatrix = new CvMat(3, 3, MatrixType.F64C1, new double[9]))
            {
                Cv.FindHomography(srcPointsMat, dstPointsMat, viewerHomographyMatrix);

                using (CV.Mat src = WriteableBitmapConverter.ToMat((WriteableBitmap)imgRaw.Source))
                // NOTE(review): output size is taken from srcGrid while the target quad is in
                // dstGrid coordinates — correct only if both grids share the same size; confirm.
                using (CV.Mat dst = new CV.Mat((int)srcGrid.Height, (int)srcGrid.Width, src.Type()))
                {
                    Cv.WarpPerspective(src.ToCvMat(), dst.ToCvMat(), viewerHomographyMatrix);
                    imgTransformed.Source = WriteableBitmapConverter.ToWriteableBitmap(dst);
                }
            }
        }