Code example #1
0
        /// <summary>
        /// Trains the BGR and HSV k-nearest-neighbour colour classifiers from the
        /// generated training pairs, then persists both models to disk (best effort).
        /// </summary>
        public void Train()
        {
            /*
             * in trainData:    data[i,.,.,.]   = vector
             * trainClasses: classes[i]         = class
             */
            List<KeyValuePair<ColorPair, CardColor>> pairs = new List<KeyValuePair<ColorPair, CardColor>>(GenerateTrainPairs());

            // Number of neighbours consulted by the KNN classifiers.
            const int K = 10;

            #region Generate the training data and classes
            Matrix<float> bgrTraining = new Matrix<float>(pairs.Count, 3);
            Matrix<float> hsvTraining = new Matrix<float>(pairs.Count, 3);
            Matrix<float> colorClasses = new Matrix<float>(pairs.Count, 1);

            for (int i = 0; i < pairs.Count; i++)
            {
                ColorPair colors = pairs[i].Key;

                // One row per sample: BGR channels in columns 0..2.
                bgrTraining[i, 0] = (float)colors.Bgr.Blue;
                bgrTraining[i, 1] = (float)colors.Bgr.Green;
                bgrTraining[i, 2] = (float)colors.Bgr.Red;

                // Parallel matrix with the same sample's HSV channels.
                hsvTraining[i, 0] = (float)colors.Hsv.Hue;
                hsvTraining[i, 1] = (float)colors.Hsv.Satuation;
                hsvTraining[i, 2] = (float)colors.Hsv.Value;

                // Class label encoded as the enum's underlying numeric value.
                colorClasses[i, 0] = (float)(int)pairs[i].Value;
            }
            #endregion

            bgrClassifier = new KNearest(bgrTraining, colorClasses, null, false, K);
            hsvClassifier = new KNearest(hsvTraining, colorClasses, null, false, K);

            // Persisting the models is best effort: a failure to write the files
            // must not prevent the freshly trained in-memory classifiers from
            // being used, so any save error is deliberately ignored here.
            try
            {
                bgrClassifier.Save("bgr.txt");
                hsvClassifier.Save("hsv.txt");
            }
            catch (Exception)
            {
                // Intentionally swallowed; see comment above the try block.
            }
        }
Code example #2
0
File: TestML.cs  Project: pakerliu/sharp-context
        /// <summary>
        /// KNN classification demo: generates three Gaussian point clusters as
        /// training data, trains a k-nearest-neighbour classifier, colours every
        /// pixel of a 500x500 image by its predicted class (shade indicating how
        /// many of the K neighbours agreed), overlays the training samples, and
        /// returns the resulting visualisation.
        /// </summary>
        /// <returns>The 500x500 classification map with training points drawn on top.</returns>
        private Image<Bgr, Byte> knn()
        {
            int K = 10;                  // neighbours consulted per query point
            int trainSampleCount = 150;  // total samples, split evenly across 3 classes
            int sigma = 60;              // std-dev of each Gaussian cluster

            #region Generate the training data and classes

            Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
            Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

            Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

            Matrix<float> sample = new Matrix<float>(1, 2);

            // The three trainDataN matrices are row views into trainData, so the
            // SetRandNormal calls below fill the shared backing storage.
            // Cluster 1: centred near (100, 300).
            Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
            trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
            trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

            // Cluster 2: centred near (400, 400).
            Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

            // Cluster 3: centred near (300, 100).
            Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
            trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

            // Labels 1/2/3, one contiguous block per cluster (views into trainClasses).
            Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
            trainClasses1.SetValue(1);
            Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainClasses2.SetValue(2);
            Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainClasses3.SetValue(3);

            #endregion

            Matrix<float> results = new Matrix<float>(sample.Rows, 1);
            Matrix<float> neighborResponses = new Matrix<float>(sample.Rows, K);

            using (KNearest knn = new KNearest()) {
                // Fail fast rather than silently classifying with an untrained model.
                bool trained = knn.Train(trainData, trainClasses, null, false, K, false);
                if (!trained)
                    throw new InvalidOperationException("KNearest training failed.");

                for (int i = 0; i < img.Height; i++) {
                    for (int j = 0; j < img.Width; j++) {
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;

                        // Predicted class for this pixel plus the K neighbour labels.
                        float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);

                        // Count how many of the K neighbours agree with the prediction
                        // (responses are exact integer-valued floats, so == is safe).
                        int accuracy = 0;
                        for (int k = 0; k < K; k++) {
                            if (neighborResponses.Data[0, k] == response)
                                accuracy++;
                        }

                        // Saturated colour when a clear majority agrees, washed-out otherwise.
                        img[i, j] =
                            response == 1 ? (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 30, 30)) :
                            response == 2 ? (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(30, 90, 30)) :
                            (accuracy > 5 ? new Bgr(0, 0, 90) : new Bgr(30, 30, 90));
                    }
                }
                knn.Save(@"D:\Play Data\KNN训练数据");
            }

            // Overlay the original training samples, one bright dot per sample.
            for (int i = 0; i < (trainSampleCount / 3); i++) {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
                PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
                img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
            }
            return img;
        }