Code example #1
File: RTreesTest.cs  Project: zjnny/opencvsharp
        public void SaveLoadTest()
        {
            float[,] trainFeaturesData =
            {
                 {0, 0},
                 {0, 100},
                 {100, 0},
                 {100, 100},
            };
            var trainFeatures = new Mat(4, 2, MatType.CV_32F, trainFeaturesData);

            int[] trainLabelsData = { 1, -1, 1, -1 };
            var trainLabels = new Mat(4, 1, MatType.CV_32S, trainLabelsData);

            const string fileName = "rtrees.yml";
            if (File.Exists(fileName))
                File.Delete(fileName);

            using (var model = RTrees.Create())
            {
                model.Train(trainFeatures, SampleTypes.RowSample, trainLabels);

                model.Save(fileName);
            }

            Assert.True(File.Exists(fileName));

            string content = File.ReadAllText(fileName);
            //Console.WriteLine(content);

            // Assert.DoesNotThrow
            using (var model2 = RTrees.Load(fileName)) { }
            using (var model2 = RTrees.LoadFromString(content)) { }
        }
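
A quick way to close the loop on this save/load round trip (not part of the original test) would be to predict with one of the reloaded models. The sketch below reuses the Predict(Mat) call and the (90, 90) probe from the RunTest example that follows; the expected class of -1 simply mirrors that test's assertion and assumes the reloaded forest behaves the same as a freshly trained one.

            // Sketch only: exercise a reloaded model with the probe used in RunTest below.
            float[] probeData = { 90, 90 };
            var probe = new Mat(1, 2, MatType.CV_32F, probeData);
            using (var reloaded = RTrees.Load(fileName))
            {
                var predictedClass = (int)reloaded.Predict(probe);
                Assert.Equal(-1, predictedClass);
            }
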
Code example #2
File: RTreesTest.cs  Project: zhaojinbo6/opencvsharp
        public void RunTest()
        {
            float[,] trainFeaturesData =
            {
                {   0,   0 },
                {   0, 100 },
                { 100,   0 },
                { 100, 100 },
            };
            var trainFeatures = new Mat(4, 2, MatType.CV_32F, trainFeaturesData);

            int[] trainLabelsData = { 1, -1, 1, -1 };
            var   trainLabels     = new Mat(4, 1, MatType.CV_32S, trainLabelsData);

            var model = RTrees.Create();

            model.Train(trainFeatures, SampleTypes.RowSample, trainLabels);

            float[] testFeatureData = { 90, 90 };
            var     testFeature     = new Mat(1, 2, MatType.CV_32F, testFeatureData);

            var detectedClass = (int)model.Predict(testFeature);
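            // (90, 90) lies nearest the training sample (100, 100), which was labelled -1,
            // so the forest is expected to return the -1 class.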

            Assert.Equal(-1, detectedClass);
        }
Code example #3
File: OpenCVModel.cs  Project: speedyjeff/mlmodels
        public override void Load(string path)
        {
            if (TrainedModel != null)
            {
                throw new InvalidOperationException("May only train/load a model once");
            }

            TrainedModel = RTrees.Load(path);
        }
Code example #4
File: OpenCVModel.cs  Project: speedyjeff/mlmodels
        public override void Train(float[,] data, List <float> labels)
        {
            if (TrainedModel != null)
            {
                throw new InvalidOperationException("May only train/load a model once");
            }
            if (data.GetLength(0) != labels.Count)
            {
                throw new InvalidOperationException("Input data and label length must match");
            }

            var dataInput  = InputArray.Create <float>(data);
            var labelInput = InputArray.Create <float>(labels);

            TrainedModel = RTrees.Create();
            TrainedModel.Train(dataInput, SampleTypes.RowSample, labelInput);
        }
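
The OpenCVModel.cs snippets above only show Load and Train. A hypothetical Predict counterpart is sketched below; it assumes TrainedModel is the RTrees field used above, packs a single sample into a 1 x N row (matching the RowSample layout), and reuses the InputArray.Create<float>(float[,]) overload and the StatModel Predict call seen in the other examples. The method name and shape are illustrative, not taken from the project.

        // Hypothetical sketch, not from OpenCVModel.cs: single-sample prediction.
        public float Predict(float[] features)
        {
            if (TrainedModel == null)
            {
                throw new InvalidOperationException("Model must be trained or loaded first");
            }

            // Pack the sample as one row so it matches the RowSample layout used in Train.
            var row = new float[1, features.Length];
            for (int j = 0; j < features.Length; j++)
            {
                row[0, j] = features[j];
            }

            var sampleInput = InputArray.Create<float>(row);
            return TrainedModel.Predict(sampleInput);
        }
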
Code example #5
        private void train()
        {
            try
            {
                rtrees                = new RTrees();
                rtrees.MaxDepth       = 2;
                rtrees.MinSampleCount = 10;
                rtrees.MaxCategories  = 43;

                rtrees.Train(TrainData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, TrainLabel);
                rtrees.Save("randomforest.xml");
            }
            catch (System.AccessViolationException)
            {
                //recognier = new EigenFaceRecognizer(80, double.PositiveInfinity);
                System.Console.WriteLine("Exception");
            }
        }
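
As a hedged follow-up to the training helper above (not part of the original class): once Train succeeds, the forest can classify a single feature row with the same Predict(sample, null) call used in the Emgu CV letter-recognition tests below. The helper name and the probe matrix are hypothetical; the probe is assumed to be a 1 x N Matrix<float> whose column count matches the training data's feature width.

        // Hypothetical helper, not in the original class; probe is a 1 x N feature row.
        private void classifyOne(Matrix<float> probe)
        {
            // Predict returns the class label the forest votes for; 43 categories were allowed above.
            float predictedClass = rtrees.Predict(probe, null);
            System.Console.WriteLine("Predicted class: {0}", predictedClass);
        }
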
Code example #6
File: AutoTestML.cs  Project: zhc341272/emgucv
        public void TestRTreesLetterRecognition()
        {
            Matrix <float> data, response;

            ReadLetterRecognitionData(out data, out response);

            int trainingSampleCount = (int)(data.Rows * 0.8);

            Matrix <Byte> varType = new Matrix <byte>(data.Cols + 1, 1);

            varType.SetValue((byte)MlEnum.VarType.Numerical);         //the data is numerical
            varType[data.Cols, 0] = (byte)MlEnum.VarType.Categorical; //the response is categorical

            Matrix <byte> sampleIdx = new Matrix <byte>(data.Rows, 1);

            using (Matrix <byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
                sampleRows.SetValue(255);

            using (RTrees forest = new RTrees())
                using (
                    TrainData td = new TrainData(data, MlEnum.DataLayoutType.RowSample, response, null, sampleIdx, null,
                                                 varType))
                {
                    forest.MaxDepth               = 10;
                    forest.MinSampleCount         = 10;
                    forest.RegressionAccuracy     = 0.0f;
                    forest.UseSurrogates          = false;
                    forest.MaxCategories          = 15;
                    forest.CalculateVarImportance = true;
                    forest.ActiveVarCount         = 4;
                    forest.TermCriteria           = new MCvTermCriteria(100, 0.01f);
                    bool success = forest.Train(td);

                    if (!success)
                    {
                        return;
                    }

                    double trainDataCorrectRatio = 0;
                    double testDataCorrectRatio  = 0;
                    for (int i = 0; i < data.Rows; i++)
                    {
                        using (Matrix <float> sample = data.GetRow(i))
                        {
                            double r = forest.Predict(sample, null);
                            r = Math.Abs(r - response[i, 0]);
                            if (r < 1.0e-5)
                            {
                                if (i < trainingSampleCount)
                                {
                                    trainDataCorrectRatio++;
                                }
                                else
                                {
                                    testDataCorrectRatio++;
                                }
                            }
                        }
                    }

                    trainDataCorrectRatio /= trainingSampleCount;
                    testDataCorrectRatio  /= (data.Rows - trainingSampleCount);

                    StringBuilder builder = new StringBuilder("Variable Importance: ");

                    /*
                     * using (Matrix<float> varImportance = forest.VarImportance)
                     * {
                     * for (int i = 0; i < varImportance.Cols; i++)
                     * {
                     *    builder.AppendFormat("{0} ", varImportance[0, i]);
                     * }
                     * }*/

                    EmguAssert.WriteLine(String.Format("Prediction accuracy for training data :{0}%",
                                                       trainDataCorrectRatio * 100));
                    EmguAssert.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio * 100));
                    EmguAssert.WriteLine(builder.ToString());
                }
        }
Code example #7
File: AutoTestML.cs  Project: samuto/UnityOpenCV
        public void TestRTreesLetterRecognition()
        {
            Matrix<float> data, response;
            ReadLetterRecognitionData(out data, out response);

            int trainingSampleCount = (int)(data.Rows * 0.8);

            Matrix<Byte> varType = new Matrix<byte>(data.Cols + 1, 1);
            varType.SetValue((byte)MlEnum.VAR_TYPE.NUMERICAL);          //the data is numerical
            varType[data.Cols, 0] = (byte)MlEnum.VAR_TYPE.CATEGORICAL;  //the response is categorical

            Matrix<byte> sampleIdx = new Matrix<byte>(data.Rows, 1);
            using (Matrix<byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
                sampleRows.SetValue(255);

            MCvRTParams param = new MCvRTParams();
            param.maxDepth = 10;
            param.minSampleCount = 10;
            param.regressionAccuracy = 0.0f;
            param.useSurrogates = false;
            param.maxCategories = 15;
            param.priors = IntPtr.Zero;
            param.calcVarImportance = true;
            param.nactiveVars = 4;
            param.termCrit = new MCvTermCriteria(100, 0.01f);
            param.termCrit.type = Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_ITER;

            using (RTrees forest = new RTrees())
            {
                bool success = forest.Train(
                    data,
                    Emgu.CV.ML.MlEnum.DATA_LAYOUT_TYPE.ROW_SAMPLE,
                    response,
                    null,
                    sampleIdx,
                    varType,
                    null,
                    param);

                if (!success) return;

                double trainDataCorrectRatio = 0;
                double testDataCorrectRatio = 0;
                for (int i = 0; i < data.Rows; i++)
                {
                    using (Matrix<float> sample = data.GetRow(i))
                    {
                        double r = forest.Predict(sample, null);
                        r = Math.Abs(r - response[i, 0]);
                        if (r < 1.0e-5)
                        {
                            if (i < trainingSampleCount)
                                trainDataCorrectRatio++;
                            else
                                testDataCorrectRatio++;
                        }
                    }
                }

                trainDataCorrectRatio /= trainingSampleCount;
                testDataCorrectRatio /= (data.Rows - trainingSampleCount);

                StringBuilder builder = new StringBuilder("Variable Importance: ");
                using (Matrix<float> varImportance = forest.VarImportance)
                {
                    for (int i = 0; i < varImportance.Cols; i++)
                    {
                        builder.AppendFormat("{0} ", varImportance[0, i]);
                    }
                }

                Trace.WriteLine(String.Format("Prediction accuracy for training data :{0}%", trainDataCorrectRatio * 100));
                Trace.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio * 100));
                Trace.WriteLine(builder.ToString());
            }
        }
Code example #8
        private void btnTrain_Click(object sender, EventArgs e)
        {
            {/*
              * Matrix<float> samp = new Matrix<float>(2834 + 2 * 1218, 5120, 1);
              * Matrix<float> labels = new Matrix<float>(2834 + 2 * 1218, 1, 1);
              * int mat_row_index = 0;
              * List<Image<Bgr, byte>> list_train_pos = new List<Image<Bgr, byte>>();
              * List<Image<Bgr, byte>> list_train_neg = new List<Image<Bgr, byte>>();
              *
              * //INITIAL TRAINING OF POSITIVE WINDOWS
              *
              * string[] pos_train_images_paths = Directory.GetFiles(@"D:\uOttawa\Winter 2018\INRIAPerson\INRIAPerson\train_64x128_H96\pos");
              * foreach (var pos_train_image_path in pos_train_images_paths)
              * {
              *     Image<Bgr, byte> pos_train_image = new Image<Bgr, byte>(pos_train_image_path);
              *     pos_train_image = pos_train_image.SmoothGaussian(3);
              *     Rectangle roi;
              *     if (pos_train_image.Width == 70)
              *         roi = new Rectangle(6, 6, 64, 128);
              *     else
              *         roi = new Rectangle(32, 32, 64, 128);
              *     pos_train_image.ROI = roi;
              *     list_train_pos.Add(pos_train_image.Copy());
              *     List<Image<Gray, UInt16>> channels = functionsGets.GetACF(pos_train_image.Copy());
              *     Matrix<float> s = functionsGets.getMatFromImageList(channels);
              *     coreFunctions.AddACFToMat(samp, s, mat_row_index);
              *     labels.Data[mat_row_index, 0] = 1;
              *     mat_row_index++;
              * }
              * Console.WriteLine("Added Pos");
              *
              * //INITIAL TRAINING OF NEGATIVE WINDOWS
              *
              * string[] neg_train_image_paths = Directory.GetFiles(@"D:\uOttawa\Winter 2018\INRIAPerson\INRIAPerson\Train\neg");
              * foreach (var neg_train_image_path in neg_train_image_paths)
              * {
              *     Image<Bgr, byte> neg_train_image = new Image<Bgr, byte>(neg_train_image_path);
              *     neg_train_image = neg_train_image.SmoothGaussian(3);
              *     List<Image<Bgr, byte>> rois = functionsGets.GetRandomNegRectangles(neg_train_image.Copy(), 2);
              *     foreach (var roi in rois)
              *     {
              *         list_train_neg.Add(roi.Copy());
              *         List<Image<Gray, UInt16>> channels = functionsGets.GetACF(roi.Copy());
              *         Matrix<float> s = functionsGets.getMatFromImageList(channels);
              *         coreFunctions.AddACFToMat(samp, s, mat_row_index);
              *         labels.Data[mat_row_index, 0] = 2;
              *         mat_row_index++;
              *     }
              * }
              * Console.WriteLine("Added Neg");
              *
              * TrainData td = new TrainData(samp, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, labels);
              *
              * Console.WriteLine("Training forest");
              *
              * RTrees forest = new RTrees();
              * forest.MaxDepth = 2;
              * forest.Use1SERule = true;
              * forest.Train(td);
              *
              * Console.WriteLine("Getting HN");
              *
              * List<Image<Bgr, byte>> list_negs_retrain = new List<Image<Bgr, byte>>();
              * foreach (var neg in list_train_neg)
              * {
              *     list_negs_retrain.Add(neg);
              *     List<Image<Gray, UInt16>> channels = functionsGets.GetACF(neg.Copy());
              *     Matrix<float> s = functionsGets.getMatFromImageList(channels);
              *     float x = forest.Predict(s);
              *     if (Math.Round(x) == 1)
              *     {
              *         list_negs_retrain.Add(neg);
              *     }
              * }
              *
              * Console.WriteLine("Getting HP");
              *
              * List<Image<Bgr, byte>> list_pos_retrain = new List<Image<Bgr, byte>>();
              * foreach (var pos in list_train_pos)
              * {
              *     list_pos_retrain.Add(pos);
              *     List<Image<Gray, UInt16>> channels = functionsGets.GetACF(pos.Copy());
              *     Matrix<float> s = functionsGets.getMatFromImageList(channels);
              *     float x = forest.Predict(s);
              *     if (Math.Round(x) == 2)
              *     {
              *         list_pos_retrain.Add(pos);
              *     }
              * }
              *
              * Console.WriteLine("Equalizing");
              * int diff = Math.Abs(list_train_pos.Count - list_negs_retrain.Count);
              * for (int i = 0; i < diff; i++)
              * {
              *     Random rand = new Random();
              *     if (list_pos_retrain.Count > list_negs_retrain.Count)
              *     {
              *         int index = rand.Next(0, neg_train_image_paths.Count());
              *         Image<Bgr, byte> neg = new Image<Bgr, byte>(neg_train_image_paths[index]);
              *         List<Image<Bgr, byte>> neg_to_be_added = functionsGets.GetRandomNegRectangles(neg.Copy(), 1);
              *         list_negs_retrain.Add(neg_to_be_added[0].Copy());
              *     }
              *     else
              *     {
              *         int index = rand.Next(0, list_train_pos.Count - 1);
              *         list_pos_retrain.Add(list_train_pos[index].Copy());
              *     }
              * }
              *
              * samp = new Matrix<float>(list_pos_retrain.Count + list_negs_retrain.Count, 5120, 1);
              * labels = new Matrix<float>(list_pos_retrain.Count + list_negs_retrain.Count, 1, 1);
              * mat_row_index = 0;
              *
              * Console.WriteLine("Readding pos");
              * foreach (var pos in list_pos_retrain)
              * {
              *     List<Image<Gray, UInt16>> channels = functionsGets.GetACF(pos.Copy());
              *     Matrix<float> s = functionsGets.getMatFromImageList(channels);
              *     coreFunctions.AddACFToMat(samp, s, mat_row_index);
              *     labels.Data[mat_row_index, 0] = 1;
              *     mat_row_index++;
              * }
              * Console.WriteLine("Readding neg");
              * foreach (var neg in list_negs_retrain)
              * {
              *     List<Image<Gray, UInt16>> channels = functionsGets.GetACF(neg.Copy());
              *     Matrix<float> s = functionsGets.getMatFromImageList(channels);
              *     coreFunctions.AddACFToMat(samp, s, mat_row_index);
              *     labels.Data[mat_row_index, 0] = 2;
              *     mat_row_index++;
              * }
              * Console.WriteLine("Retraining");
              * TrainData tdf = new TrainData(samp, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, labels);
              * RTrees forestf = new RTrees();
              * forestf.MaxDepth = 2;
              * forestf.Use1SERule = true;
              * forestf.Train(tdf);
              *
              * FileStorage fs = new FileStorage(@"C:\Users\welah\Desktop\testing\" + "forest.xml", FileStorage.Mode.Write);
              * forestf.Write(fs);
              * fs.ReleaseAndGetString();
              *
              *
              * Console.WriteLine("Predicting pos");
              * int total_pos = 0;
              * int total_neg = 0;
              * int ann = 0;
              * List<float> xspos = new List<float>();
              * List<float> xsneg = new List<float>();
              * //string[] image_folders_pos = Directory.GetDirectories(@"D:\uOttawa\Winter 2018\INRIAPerson\INRIAPerson\Test\pos");
              * string[] images_paths_pos = Directory.GetFiles(@"D:\uOttawa\Winter 2018\INRIAPerson\INRIAPerson\test_64x128_H96\pos");
              * foreach (var image_path in images_paths_pos)
              * {
              *     Image<Bgr, byte> img_original = new Image<Bgr, byte>(image_path);
              *     img_original = img_original.SmoothGaussian(3);
              *     Rectangle roi;
              *     if (img_original.Width == 70)
              *         roi = new Rectangle(6, 6, 64, 128);
              *     else
              *         roi = new Rectangle(32, 32, 64, 128);
              *     img_original.ROI = new Rectangle(6, 6, 64, 128);
              *
              *     List<Image<Gray, UInt16>> channels = functionsGets.GetACF(img_original);
              *     Matrix<float> s = functionsGets.getMatFromImageList(channels);
              *     float x = forestf.Predict(s);
              *     xspos.Add(x);
              *     if (Math.Round(x) == 1)
              *         total_pos++;
              * }
              * Console.WriteLine("Predicting neg");
              * string[] images_paths_neg = Directory.GetFiles(@"D:\uOttawa\Winter 2018\INRIAPerson\INRIAPerson\Test\neg");
              * foreach (var image_path in images_paths_neg)
              * {
              *     Image<Bgr, byte> img_original = new Image<Bgr, byte>(image_path);
              *     img_original = img_original.SmoothGaussian(3);
              *     List<Image<Bgr, byte>> list_negs = functionsGets.GetRandomNegRectangles(img_original, 2);
              *     foreach (var neg in list_negs)
              *     {
              *         List<Image<Gray, UInt16>> channels = functionsGets.GetACF(neg);
              *         Matrix<float> s = functionsGets.getMatFromImageList(channels);
              *         float x = forestf.Predict(s);
              *         xsneg.Add(x);
              *         if (Math.Round(x) == 2)
              *             total_neg++;
              *     }
              * }
              * int posbel = 0;
              * int negab = 0;
              * foreach (var item in xspos)
              *     if (item <= 1.3)
              *         posbel++;
              * foreach (var item in xsneg)
              *     if (item > 1.3)
              *         negab++;
              * Console.WriteLine("Av pos: {0} - posbel: {1} - posab: {2}", xspos.Average().ToString(), posbel, xspos.Count() - posbel);
              * Console.WriteLine("Av neg: {0} - negab: {1} - negbe: {2}", xsneg.Average().ToString(), negab, xsneg.Count() - negab);
              *
              * float avpos = xspos.Average();
              * float maxpos = xspos.Max();
              * float minpos = xspos.Min();
              * float avneg = xsneg.Average();
              * float maxneg = xsneg.Max();
              * float minneg = xsneg.Min();
              *
              * TextWriter tw = new StreamWriter("xpos.txt");
              *
              * foreach (var x in xspos)
              *     tw.WriteLine(x.ToString());
              *
              * tw.Close();
              *
              * TextWriter tw1 = new StreamWriter("xneg.txt");
              *
              * foreach (var x in xsneg)
              *     tw1.WriteLine(x.ToString());
              *
              * tw1.Close();
              */
            }
            int            nnegs         = 5;
            Matrix <float> samp          = new Matrix <float>(2 * nnegs * 1218, 5120, 1);
            Matrix <float> labels        = new Matrix <float>(2 * nnegs * 1218, 1, 1);
            int            mat_row_index = 0;

            //string image_folders_pos_folder = tbImagesTrain.Text;
            //string[] image_folders_pos = Directory.GetDirectories(tbPosTrain.Text);
            List <Image <Bgr, byte> > list_poss = new List <Image <Bgr, byte> >();

            string[] image_files_pos = Directory.GetFiles(@"D:\uOttawa\Winter 2018\INRIAPerson\INRIAPerson\train_64x128_H96\pos");
            foreach (var image_path in image_files_pos)
            {
                //string[] channels = Directory.GetFiles(annotation_folder_pos);
                Image <Bgr, byte> img_original = new Image <Bgr, byte>(image_path);
                img_original = img_original.SmoothGaussian(3);
                Image <Bgr, byte> ped = img_original.Copy();
                if (ped.Width == 70)
                {
                    ped.ROI = new Rectangle(3, 0, 64, 128);
                }
                else
                {
                    ped.ROI = new Rectangle(16, 16, 64, 128);
                }
                list_poss.Add(ped);
                if (ped.Width == 64 && ped.Height == 128)
                {
                    List <Image <Gray, UInt16> > channels = functionsGets.GetACF(ped);
                    Matrix <float> s = functionsGets.getMatFromImageList(channels);
                    //Matrix<int> l = getLabelPos();
                    coreFunctions.AddACFToMat(samp, s, mat_row_index);
                    labels.Data[mat_row_index, 0] = 1;
                    mat_row_index++;
                }
            }

            List <Image <Bgr, byte> > list_negs = new List <Image <Bgr, byte> >();

            string[] image_files_neg = Directory.GetFiles(@"D:\uOttawa\Winter 2018\INRIAPerson\INRIAPerson\Train\neg");
            foreach (var image_path in image_files_neg)
            {
                //string[] channels = Directory.GetFiles(annotation_folder_pos);
                Image <Bgr, byte> img_original = new Image <Bgr, byte>(image_path);
                img_original = img_original.SmoothGaussian(3);
                //List<Rectangle> list_rects = coreFunctions.getBoundingBoxes(image_path, @"D:\uOttawa\Winter 2018\INRIAPerson\INRIAPerson\Train\annotations");
                List <Image <Bgr, byte> > list_neg_windows = functionsGets.GetRandomNegRectangles(img_original, nnegs);
                foreach (var img in list_neg_windows)
                {
                    //Image<Bgr, byte> temp_neg = img_original.Copy();
                    //temp_neg.ROI = rect;
                    list_negs.Add(img);
                    List <Image <Gray, UInt16> > channels = functionsGets.GetACF(img);
                    Matrix <float> s = functionsGets.getMatFromImageList(channels);
                    //Matrix<float> l = getLabelNeg();
                    coreFunctions.AddACFToMat(samp, s, mat_row_index);
                    labels.Data[mat_row_index, 0] = 2;
                    mat_row_index++;
                    //samp.Mat.PushBack(s.Mat);
                    //labels.Mat.PushBack(l.Mat);
                }
            }
            int difference = Math.Abs(list_poss.Count() - list_negs.Count());

            for (int i = 0; i < difference; i++)
            {
                Random                       random   = new Random();
                int                          index    = random.Next(0, list_poss.Count() - 1);
                Image <Bgr, byte>            pos      = list_poss[index];
                List <Image <Gray, UInt16> > channels = functionsGets.GetACF(pos);
                Matrix <float>               s        = functionsGets.getMatFromImageList(channels);
                //Matrix<int> l = getLabelPos();
                coreFunctions.AddACFToMat(samp, s, mat_row_index);
                labels.Data[mat_row_index, 0] = 1;
                mat_row_index++;
            }
            List <Image <Bgr, byte> > list_negs_final = new List <Image <Bgr, byte> >();
            List <Image <Bgr, byte> > list_poss_final = new List <Image <Bgr, byte> >();

            Console.WriteLine(samp.Size);
            Console.WriteLine("GOING ONCE!");
            TrainData td   = new TrainData(samp, DataLayoutType.RowSample, labels);
            RTrees    tree = new RTrees();

            //tree.TermCriteria = new MCvTermCriteria(50);
            //tree.TruncatePrunedTree = true;
            //tree.MaxDepth = 2;
            tree.Use1SERule   = true;
            tree.TermCriteria = new MCvTermCriteria(0.75);
            //tree.TermCriteria = new MCvTermCriteria(200);
            tree.Train(td);
            Console.WriteLine("SOLD!");

            foreach (var pos in list_poss)
            {
                List <Image <Gray, UInt16> > channels = functionsGets.GetACF(pos);
                Matrix <float> s = functionsGets.getMatFromImageList(channels);
                float          x = tree.Predict(s);
                if (Math.Round(x) == 2)
                {
                    list_poss_final.Add(pos);
                }
            }
            foreach (var pos in list_poss)
            {
                list_poss_final.Add(pos);
            }

            foreach (var neg in list_negs)
            {
                List <Image <Gray, UInt16> > channels = functionsGets.GetACF(neg);
                Matrix <float> s = functionsGets.getMatFromImageList(channels);
                float          x = tree.Predict(s);
                if (Math.Round(x) == 1)
                {
                    list_negs_final.Add(neg);
                }
            }
            foreach (var neg in list_negs)
            {
                list_negs_final.Add(neg);
            }

            //List<Image<Bgr, byte>> finalest_pos = list_poss_final;
            //List<Image<Bgr, byte>> finalest_neg = list_negs_final;
            List <Image <Bgr, byte> > finalest_pos = new List <Image <Bgr, byte> >();
            List <Image <Bgr, byte> > finalest_neg = new List <Image <Bgr, byte> >();

            if (list_poss_final.Count() > list_negs_final.Count())
            {
                int posc = list_poss_final.Count();
                int negc = list_negs_final.Count();
                int diff = posc - negc;
                for (int i = 0; i < diff; i++)
                {
                    Random random = new Random();
                    int    index  = random.Next(0, list_negs_final.Count() - 1);
                    finalest_neg.Add(list_negs_final[index]);
                }
                foreach (var neg in list_negs_final)
                {
                    finalest_neg.Add(neg);
                }
                finalest_pos = list_poss_final;
            }
            else if (list_negs_final.Count() > list_poss_final.Count())
            {
                int posc = list_poss_final.Count();
                int negc = list_negs_final.Count();
                int diff = negc - posc;
                for (int i = 0; i < diff; i++)
                {
                    Random random = new Random();
                    int    index  = random.Next(0, list_poss_final.Count() - 1);
                    finalest_pos.Add(list_poss_final[index]);
                }
                foreach (var pos in list_poss_final)
                {
                    finalest_pos.Add(pos);
                }
                finalest_neg = list_negs_final;
            }
            samp          = new Matrix <float>(finalest_pos.Count() + finalest_neg.Count(), 5120, 1);
            labels        = new Matrix <float>(finalest_pos.Count() + finalest_neg.Count(), 1, 1);
            mat_row_index = 0;
            foreach (var pos in finalest_pos)
            {
                List <Image <Gray, UInt16> > channels = functionsGets.GetACF(pos);
                Matrix <float> s = functionsGets.getMatFromImageList(channels);
                //Matrix<int> l = getLabelPos();
                coreFunctions.AddACFToMat(samp, s, mat_row_index);
                labels.Data[mat_row_index, 0] = 1;
                mat_row_index++;
            }
            foreach (var neg in finalest_neg)
            {
                List <Image <Gray, UInt16> > channels = functionsGets.GetACF(neg);
                Matrix <float> s = functionsGets.getMatFromImageList(channels);
                //Matrix<int> l = getLabelPos();
                coreFunctions.AddACFToMat(samp, s, mat_row_index);
                labels.Data[mat_row_index, 0] = 2;
                mat_row_index++;
            }
            Console.WriteLine("GOING TWICE!");
            td   = new TrainData(samp, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, labels);
            tree = new RTrees();
            //tree.TermCriteria = new MCvTermCriteria(50);
            //tree.TruncatePrunedTree = true;
            //tree.MaxDepth = 2;
            tree.Use1SERule   = true;
            tree.TermCriteria = new MCvTermCriteria(0.75);
            //tree.TermCriteria = new MCvTermCriteria(200);
            tree.Train(td);
            Console.WriteLine("SOLD!");


            int          total_pos = 0;
            int          total_neg = 0;
            int          ann       = 0;
            List <float> xspos     = new List <float>();
            List <float> xsneg     = new List <float>();

            //string[] image_folders_pos = Directory.GetDirectories(@"D:\uOttawa\Winter 2018\INRIAPerson\INRIAPerson\Test\pos");
            string[] images_paths_pos = Directory.GetFiles(@"D:\uOttawa\Winter 2018\INRIAPerson\INRIAPerson\test_64x128_H96\pos");
            foreach (var image_path in images_paths_pos)
            {
                Image <Bgr, byte> img_original = new Image <Bgr, byte>(image_path);
                img_original = img_original.SmoothGaussian(3);
                //List<Rectangle> list_bounding_boxes = coreFunctions.getBoundingBoxes(image_path, tbAnnotations.Text);
                Image <Bgr, byte> ped = img_original;
                ped.ROI = new Rectangle(3, 0, 64, 128);

                if (ped.Width == 64 && ped.Height == 128)
                {
                    List <Image <Gray, UInt16> > channels = functionsGets.GetACF(ped);
                    Matrix <float> s = functionsGets.getMatFromImageList(channels);
                    float          x = tree.Predict(s);
                    xspos.Add(x);
                    if (Math.Round(x) == 1)
                    {
                        total_pos++;
                    }
                }
            }
            string[] images_paths_neg = Directory.GetFiles(@"D:\uOttawa\Winter 2018\INRIAPerson\INRIAPerson\Test\neg");
            foreach (var image_path in images_paths_neg)
            {
                Image <Bgr, byte> img_original = new Image <Bgr, byte>(image_path);
                img_original = img_original.SmoothGaussian(3);
                //List<Rectangle> list_bounding_boxes = coreFunctions.getBoundingBoxes(image_path, tbAnnotations.Text);
                List <Image <Bgr, byte> > list_neg_windows = functionsGets.GetRandomNegRectangles(img_original, nnegs);
                foreach (var img in list_neg_windows)
                {
                    //Image<Bgr, byte> neg = img_original;
                    //neg.ROI = rectangle;
                    //if (neg.Width > 64 || neg.Height > 128)
                    //{
                    //    neg = neg.Resize(64, 128, Inter.Linear);
                    //}
                    List <Image <Gray, UInt16> > channels = functionsGets.GetACF(img);
                    Matrix <float> s = functionsGets.getMatFromImageList(channels);
                    float          x = tree.Predict(s);
                    xsneg.Add(x);
                    if (Math.Round(x) == 2)
                    {
                        total_neg++;
                    }
                }
            }
            int posbel = 0;
            int negab  = 0;

            foreach (var item in xspos)
            {
                if (item < 1.5)
                {
                    posbel++;
                }
            }
            foreach (var item in xsneg)
            {
                if (item > 1.5)
                {
                    negab++;
                }
            }
            Console.WriteLine("Av pos: {0} - posbel: {1} - posab: {2}", xspos.Average().ToString(), posbel, xspos.Count() - posbel);
            Console.WriteLine("Av neg: {0} - negab: {1} - negbe: {2}", xsneg.Average().ToString(), negab, xsneg.Count() - negab);
            FileStorage fs = new FileStorage(tbOutputExtract.Text + @"\forest.xml", FileStorage.Mode.Write);

            tree.Write(fs);
            fs.ReleaseAndGetString();
        }
Code example #9
        // load
        public ModelOpenCV(string path)
        {
            TrainedModel = RTrees.Load(path);
        }
Code example #10
        // train
        public ModelOpenCV(List <ModelDataSet> input, ModelValue prediction)
        {
            if (input == null || input.Count == 0)
            {
                throw new Exception("Must have valid input");
            }

            // convert features into proper form
            var features = new float[input.Count, input[0].Features()];
            var labels   = new float[input.Count];

            for (int i = 0; i < input.Count; i++)
            {
                for (int j = 0; j < input[i].Features(); j++)
                {
                    features[i, j] = input[i].Feature(j);
                }

                switch (prediction)
                {
                case ModelValue.Action: labels[i] = input[i].Action; break;

                case ModelValue.Angle: labels[i] = input[i].FaceAngle; break;

                case ModelValue.XY: labels[i] = input[i].MoveAngle; break;

                default: throw new Exception("Unknown prediction type : " + prediction);
                }
            }

            // train
            var labelInput = InputArray.Create <float>(labels);
            var dataInput  = InputArray.Create <float>(features);

            TrainedModel = RTrees.Create();
            TrainedModel.RegressionAccuracy = 0.00001f;
            // RTrees.MaxDepth (r^2)
            //  default - action 0.3424, xy 0.1735, angle 0.2208
            //    5     -
            //   20     - action 0.6482, xy 0.5912, angle 0.6414 (new default)
            //  100     - action 0.6408, xy 0.5914, angle 0.6419
            TrainedModel.MaxDepth = 20;
            // RTrees.MinSampleCount
            //  default(10) - see 20 above
            //     1        - actions 0.6625, xy 0.5077, angle 0.6376
            //    50        - actions 0.6464, xy 0.5627, angle 0.6217
            //TrainedModel.MinSampleCount = 1;

            // fails
            //TrainedModel = LogisticRegression.Create();

            //  fails
            // TrainedModel = DTrees.Create();

            // failed
            //TrainedModel = SVM.Create();
            //TrainedModel.KernelType = SVM.KernelTypes.Linear;
            //TrainedModel.Type = SVM.Types.NuSvr;
            //TrainedModel.C = 1;
            //TrainedModel.P = 0.01;
            //TrainedModel.Gamma = 10f;
            //TrainedModel.Degree = 0.1;
            //TrainedModel.Coef0 = 0;
            //TrainedModel.Nu = 0.1;

            TrainedModel.Train(dataInput, SampleTypes.RowSample, labelInput);
        }
Code example #11
File: AutoTestML.cs  Project: dtfirewind/UnityOpenCV
        public void TestRTreesLetterRecognition()
        {
            Matrix <float> data, response;

            ReadLetterRecognitionData(out data, out response);

            int trainingSampleCount = (int)(data.Rows * 0.8);

            Matrix <Byte> varType = new Matrix <byte>(data.Cols + 1, 1);

            varType.SetValue((byte)MlEnum.VAR_TYPE.NUMERICAL);         //the data is numerical
            varType[data.Cols, 0] = (byte)MlEnum.VAR_TYPE.CATEGORICAL; //the response is categorical

            Matrix <byte> sampleIdx = new Matrix <byte>(data.Rows, 1);

            using (Matrix <byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
                sampleRows.SetValue(255);

            MCvRTParams param = new MCvRTParams();

            param.maxDepth           = 10;
            param.minSampleCount     = 10;
            param.regressionAccuracy = 0.0f;
            param.useSurrogates      = false;
            param.maxCategories      = 15;
            param.priors             = IntPtr.Zero;
            param.calcVarImportance  = true;
            param.nactiveVars        = 4;
            param.termCrit           = new MCvTermCriteria(100, 0.01f);
            param.termCrit.type      = Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_ITER;

            using (RTrees forest = new RTrees())
            {
                bool success = forest.Train(
                    data,
                    Emgu.CV.ML.MlEnum.DATA_LAYOUT_TYPE.ROW_SAMPLE,
                    response,
                    null,
                    sampleIdx,
                    varType,
                    null,
                    param);

                if (!success)
                {
                    return;
                }

                double trainDataCorrectRatio = 0;
                double testDataCorrectRatio  = 0;
                for (int i = 0; i < data.Rows; i++)
                {
                    using (Matrix <float> sample = data.GetRow(i))
                    {
                        double r = forest.Predict(sample, null);
                        r = Math.Abs(r - response[i, 0]);
                        if (r < 1.0e-5)
                        {
                            if (i < trainingSampleCount)
                            {
                                trainDataCorrectRatio++;
                            }
                            else
                            {
                                testDataCorrectRatio++;
                            }
                        }
                    }
                }

                trainDataCorrectRatio /= trainingSampleCount;
                testDataCorrectRatio  /= (data.Rows - trainingSampleCount);

                StringBuilder builder = new StringBuilder("Variable Importance: ");
                using (Matrix <float> varImportance = forest.VarImportance)
                {
                    for (int i = 0; i < varImportance.Cols; i++)
                    {
                        builder.AppendFormat("{0} ", varImportance[0, i]);
                    }
                }

                Trace.WriteLine(String.Format("Prediction accuracy for training data :{0}%", trainDataCorrectRatio * 100));
                Trace.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio * 100));
                Trace.WriteLine(builder.ToString());
            }
        }