Ejemplo n.º 1
0
 public KNN(int wantedK, VectorRepository rep)
 {
     // Remember the neighbour count (commonly 10) and collaborators,
     // and create the underlying OpenCV k-nearest-neighbours model.
     _knn = new KNearest();
     _repository = rep;
     _K = wantedK;
 }
Ejemplo n.º 2
0
        public void Train(IDictionary<float[], int> trainpairs)
        {
            // Convert the (feature vector -> class id) pairs into the two
            // matrices the OpenCV KNN implementation expects, then build
            // the classifier from them.
            Matrix<float> featureMatrix;
            Matrix<float> labelMatrix;
            ClassifierUtils.GenerateTrainMatrices(trainpairs, out featureMatrix, out labelMatrix);

            classifier = new KNearest(featureMatrix, labelMatrix, null, false, 10); //TODO: set these parameters in Constructor
        }
Ejemplo n.º 3
0
        public void Train(IDictionary<float[], string> trainpairs)
        {
            // Prepare the string-label <-> id lookup tables first.
            InitLookups(trainpairs);

            // Translate every string class label into its integer id via the
            // TUL lookup so the numeric overload machinery can be reused.
            Dictionary<float[], int> numericPairs = new Dictionary<float[], int>(trainpairs.Count);
            foreach (KeyValuePair<float[], string> pair in trainpairs)
            {
                numericPairs.Add(pair.Key, TUL[pair.Value]);
            }

            Matrix<float> featureMatrix;
            Matrix<float> labelMatrix;
            ClassifierUtils.GenerateTrainMatrices(numericPairs, out featureMatrix, out labelMatrix);

            classifier = new KNearest(featureMatrix, labelMatrix, null, false, 10); //TODO: set these parameters in Constructor
        }
Ejemplo n.º 4
0
        public void Train()
        {
            /*
             * in trainData:    data[i,.,.,.]   = vector
             * trainClasses: classes[i]         = class
             */
            // Build parallel BGR and HSV training matrices plus one shared
            // class column from the generated (colour pair -> card colour)
            // samples, then train one KNN classifier per colour space.
            List<KeyValuePair<ColorPair, CardColor>> pairs = new List<KeyValuePair<ColorPair, CardColor>>(GenerateTrainPairs());

            #region Generate the traning data and classes
            Matrix<float> bgrTraining = new Matrix<float>(pairs.Count, 3);
            Matrix<float> hsvTraining = new Matrix<float>(pairs.Count, 3);
            Matrix<float> colorClasses = new Matrix<float>(pairs.Count, 1);

            for (int i = 0; i < pairs.Count; i++)
            {
                // Hoist the repeated indexer/property chains once per sample.
                ColorPair colors = pairs[i].Key;

                bgrTraining[i, 0] = (float)colors.Bgr.Blue;
                bgrTraining[i, 1] = (float)colors.Bgr.Green;
                bgrTraining[i, 2] = (float)colors.Bgr.Red;

                hsvTraining[i, 0] = (float)colors.Hsv.Hue;
                hsvTraining[i, 1] = (float)colors.Hsv.Satuation;
                hsvTraining[i, 2] = (float)colors.Hsv.Value;

                colorClasses[i, 0] = (float)(int)pairs[i].Value;
            }
            #endregion

            bgrClassifier = new KNearest(bgrTraining, colorClasses, null, false, 10);
            hsvClassifier = new KNearest(hsvTraining, colorClasses, null, false, 10);

            // Persisting the classifiers is best-effort: training already
            // succeeded, so a failed save must not crash — but it should not
            // be completely invisible either (the original empty catch
            // swallowed the error without a trace).
            try
            {
                bgrClassifier.Save("bgr.txt");
                hsvClassifier.Save("hsv.txt");
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine("KNN classifier save failed: " + ex.Message);
            }
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Creates a KNN-based learner over the given good/bad image vectors.
        /// </summary>
        /// <param name="path">Base directory under which results are stored.</param>
        /// <param name="k">Number of nearest neighbours to consult.</param>
        /// <param name="goodImages">Positive training samples.</param>
        /// <param name="badImages">Negative training samples.</param>
        public KnnML(string path, int k, ImageVector[] goodImages, ImageVector[] badImages)
        {
            this.k = k;

            type = "KNN";

            knn = new KNearest();

            this.goodImages = goodImages;
            this.badImages = badImages;

            // Per-sample learning flags and confidences, parallel to the inputs.
            learnedTrue = new bool[goodImages.Length];
            learnedFalse = new bool[badImages.Length];

            confidenceTrue = new double[goodImages.Length];
            confidenceFalse = new double[badImages.Length];

            restartTest();

            // Path.Combine instead of hand-rolled "\\" concatenation: robust
            // against a trailing separator in the caller-supplied path.
            userPath = System.IO.Path.Combine(path, "KNN");
            resultPath = System.IO.Path.Combine(userPath, k.ToString());
        }
Ejemplo n.º 6
0
      /// <summary>
      /// Two-class KNN demo: two Gaussian point clouds are used as training
      /// data, every pixel of a 500x500 image is classified, and the pixel is
      /// colour-coded by predicted class and the confidence of the K-vote.
      /// Also round-trips the trained model through an in-memory FileStorage.
      /// </summary>
      public void TestKNearest()
      {
         int K = 10;
         int trainSampleCount = 100;

         #region Generate the training data and classes

         Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
         Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

         Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

         Matrix<float> sample = new Matrix<float>(1, 2);

         // First half of the samples ~ N(200, 50) = class 1,
         // second half     ~ N(300, 50) = class 2.
         Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
         trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
         Matrix<float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
         trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

         Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
         trainClasses1.SetValue(1);
         Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
         trainClasses2.SetValue(2);
         #endregion

         Matrix<float> results, neighborResponses;
         results = new Matrix<float>(sample.Rows, 1);
         neighborResponses = new Matrix<float>(sample.Rows, K);

         using (KNearest knn = new KNearest())
         {
            knn.DefaultK = K;
            knn.IsClassifier = true;
            knn.Train(trainData, MlEnum.DataLayoutType.RowSample, trainClasses);

            for (int i = 0; i < img.Height; i++)
            {
               for (int j = 0; j < img.Width; j++)
               {
                  sample.Data[0, 0] = j;
                  sample.Data[0, 1] = i;

                  // BUG FIX: the original called knn.Predict, which does NOT
                  // fill neighborResponses, so the accuracy vote below always
                  // read stale zeros. FindNearest returns the predicted class
                  // AND writes the labels of the K nearest neighbours.
                  float response = knn.FindNearest(sample, K, results, neighborResponses);

                  int accuracy = 0;
                  // compute the number of neighbors representing the majority
                  for (int k = 0; k < K; k++)
                  {
                     if (neighborResponses.Data[0, k] == response)
                        accuracy++;
                  }
                  // highlight the pixel depending on the accuracy (or confidence)
                  img[i, j] =
                     response == 1 ?
                        (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 40, 0)) :
                        (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(40, 90, 0));
               }
            }

            String knnModelStr;
            //save stat model to string
            using (FileStorage fs = new FileStorage(".yml", FileStorage.Mode.Write | FileStorage.Mode.Memory))
            {
               knn.Write(fs);

               knnModelStr = fs.ReleaseAndGetString();
            }

            //load stat model from string; the round-tripped model is
            //IDisposable too, so dispose it (the original leaked it)
            using (FileStorage fs = new FileStorage(knnModelStr, FileStorage.Mode.Read | FileStorage.Mode.Memory))
            using (KNearest knn2 = new KNearest())
            {
               knn2.Read(fs.GetRoot());
            }
         }

         // display the original training samples
         for (int i = 0; i < (trainSampleCount >> 1); i++)
         {
            PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
            img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
            PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
            img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
         }

         //Emgu.CV.UI.ImageViewer.Show(img);
      }
        ///////////////////////////////////////////////////////////////////////////////////////////
        /// <summary>
        /// Loads KNN training data (classifications + flattened training
        /// images) from XML, trains a KNearest model, then lets the user pick
        /// a test image and reads its digits via contour detection + KNN.
        /// </summary>
        private void btnOpenTestImage_Click(object sender, EventArgs e)
        {
            // note: we effectively have to read the first XML file twice
                    // first, we read the file to get the number of rows (which is the same as the number of samples).
                    // the first time reading the file we can't get the data yet, since we don't know how many rows of data there are
                    // next, reinstantiate our classifications Matrix and training images Matrix with the correct number of rows
                    // then, read the file again and this time read the data into our resized classifications Matrix and training images Matrix

            Matrix<Single> mtxClassifications = new Matrix<Single>(1, 1);       // for the first time through, declare these to be 1 row by 1 column
            Matrix<Single> mtxTrainingImages = new Matrix<Single>(1, 1);        // we will resize these when we know the number of rows (i.e. number of training samples)

                    // possible chars we are interested in are digits 0 through 9
            // NOTE(review): intValidChars is never used anywhere in this method — dead local?
            List<int> intValidChars = new List<int> { (int)'0', (int)'1', (int)'2', (int)'3', (int)'4', (int)'5', (int)'6', (int)'7', (int)'8', (int)'9' };

            XmlSerializer xmlSerializer = new XmlSerializer(mtxClassifications.GetType());      // these variables are for
            StreamReader streamReader;                                                          // reading from the XML files

            try {
                streamReader = new StreamReader("classifications.xml");                     // attempt to open classifications file
            } catch(Exception ex) {                                                         // if error is encountered, show error and return
                txtInfo.Text = Environment.NewLine + txtInfo.Text + "unable to open 'classifications.xml', error:" + Environment.NewLine;
                txtInfo.Text = txtInfo.Text + ex.Message + Environment.NewLine + Environment.NewLine;
                return;
            }

                    // read from the classifications file the 1st time, this is only to get the number of rows, not the actual data
            mtxClassifications = (Matrix<Single>)xmlSerializer.Deserialize(streamReader);

            streamReader.Close();               // close the classifications XML file

            intNumberOfTrainingSamples = mtxClassifications.Rows;       // get the number of rows, i.e. the number of training samples

                    // now that we know the number of rows, reinstantiate classifications Matrix and training images Matrix with the actual number of rows
            mtxClassifications = new Matrix<Single>(intNumberOfTrainingSamples, 1);
            mtxTrainingImages = new Matrix<Single>(intNumberOfTrainingSamples, RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT);

            try {
                streamReader = new StreamReader("classifications.xml");                 // attempt to reinitialize the stream reader
            } catch (Exception ex) {                                                    // if error is encountered, show error and return
                txtInfo.Text = Environment.NewLine + txtInfo.Text + "unable to open 'classifications.xml', error:" + Environment.NewLine;
                txtInfo.Text = txtInfo.Text + ex.Message + Environment.NewLine + Environment.NewLine;
                return;
            }

                    // read from the classifications file again, this time we can get the actual data
            mtxClassifications = (Matrix<Single>)xmlSerializer.Deserialize(streamReader);

            streamReader.Close();           // close the classifications XML file

            xmlSerializer = new XmlSerializer(mtxTrainingImages.GetType());     // reinstantiate file reading variables

            try {
                streamReader = new StreamReader("images.xml");
            } catch (Exception ex) {                                            // if error is encountered, show error and return
                txtInfo.Text = Environment.NewLine + txtInfo.Text + "unable to open 'images.xml', error:" + Environment.NewLine;
                txtInfo.Text = txtInfo.Text + ex.Message + Environment.NewLine + Environment.NewLine;
                return;
            }

            mtxTrainingImages = (Matrix<Single>)xmlSerializer.Deserialize(streamReader);        // read from training images file
            streamReader.Close();           // close the training images XML file

                    // train //////////////////////////////////////////////////////////

            // NOTE(review): the 5th argument (1) appears to be the neighbour count used for
            // training — matches the k=1 FindNearest query below; confirm against the Emgu
            // KNearest constructor/Train overload in use.
            KNearest kNearest = new KNearest();                                                 // instantiate KNN object
            kNearest.Train(mtxTrainingImages, mtxClassifications, null, false, 1,false);        // call to train

                    // test ///////////////////////////////////////////////////////////////////////

            DialogResult drChosenFile;

            drChosenFile = ofdOpenFile.ShowDialog();            // open file dialog

            if (drChosenFile != DialogResult.OK || ofdOpenFile.FileName == "") {            // if user chose Cancel or filename is blank . . .
                lblChosenFile.Text = "file not chosen";         // show error message on label
                return;                                         // and exit function
            }

            Image<Bgr, Byte> imgTestingNumbers;                 // this is the main input image

            try {
                imgTestingNumbers = new Image<Bgr, Byte>(ofdOpenFile.FileName);         // open image
            } catch(Exception ex) {                                                     // if error occurred
                lblChosenFile.Text = "unable to open image, error: " + ex.Message;      // show error message on label
                return;                                                                 // and exit function
            }

            if(imgTestingNumbers == null) {                         //if image could not be opened
                lblChosenFile.Text = "unable to open image";        // show error message on label
                return;                                             // and exit function
            }

            lblChosenFile.Text = ofdOpenFile.FileName;              // update label with file name

            Image<Gray, Byte> imgGrayscale;              //
            Image<Gray, Byte> imgBlurred;                // declare various images
            Image<Gray, Byte> imgThresh;                 //
            Image<Gray, Byte> imgThreshCopy;             //

            Contour<Point> contours;

            imgGrayscale = imgTestingNumbers.Convert<Gray, Byte>();         // convert to grayscale

            imgBlurred = imgGrayscale.SmoothGaussian(5);                    // blur

                                        // filter image from grayscale to black and white
            imgThresh = imgBlurred.ThresholdAdaptive(new Gray(255), ADAPTIVE_THRESHOLD_TYPE.CV_ADAPTIVE_THRESH_GAUSSIAN_C, THRESH.CV_THRESH_BINARY_INV, 11, new Gray(2));

            imgThreshCopy = imgThresh.Clone();          // make a copy of the thresh image, this in necessary b/c findContours modifies the image

                                        // get external countours only
            contours = imgThreshCopy.FindContours(CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, RETR_TYPE.CV_RETR_EXTERNAL);

            List<Contour<Point> > listOfContours = new List<Contour<Point> >();             // declare a list of contours and a list of valid contours,
            List<Contour<Point> > listOfValidContours = new List<Contour<Point> >();        // this is necessary for removing invalid contours and sorting from left to right

                                        // populate list of contours
            while(contours != null) {               // for each contour
                Contour<Point> contour = contours.ApproxPoly(contours.Perimeter * 0.0001);      // get the current contour, note that the lower the multiplier, the higher the precision
                listOfContours.Add(contour);                                                    // add to list of contours
                contours = contours.HNext;                                                      // move on to next contour
            }
                                        // this next loop removes the invalid contours
            foreach (Contour<Point> contour in listOfContours) {// for each contour
                if(ContourIsValid(contour)) {// if contour is valid
                    listOfValidContours.Add(contour);// add to list of valid contours
                }
            }

                        // sort contours from left to right
            listOfValidContours.Sort((oneContour, otherContour) => oneContour.BoundingRectangle.X.CompareTo(otherContour.BoundingRectangle.X));

            String strFinalString = "";             // declare final string, this will have the final number sequence by the end of the program

            foreach (Contour<Point> contour in listOfValidContours) {       // for each contour in list of valid contours
                Rectangle rect = contour.BoundingRectangle;                     // get the bounding rect
                imgTestingNumbers.Draw(rect, new Bgr(Color.Green), 2);          // draw green rect around the current char
                Image<Gray, Byte> imgROI = imgThresh.Copy(rect);                // get ROI image of bounding rect

                                        // resize image, this is necessary for recognition
                Image<Gray, Byte> imgROIResized = imgROI.Resize(RESIZED_IMAGE_WIDTH, RESIZED_IMAGE_HEIGHT, INTER.CV_INTER_LINEAR);

                Matrix<Single> mtxTemp = new Matrix<Single>(imgROIResized.Size);                                        // declare a Matrix of the same dimensions as the Image we are adding to the data structure of training images
                Matrix<Single> mtxTempReshaped = new Matrix<Single>(1, RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT);     // declare a flattened (only 1 row) matrix of the same total size

                CvInvoke.cvConvert(imgROIResized, mtxTemp);             // convert Image to a Matrix of Singles with the same dimensions

                for(int intRow = 0; intRow < RESIZED_IMAGE_HEIGHT; intRow++){       // flatten Matrix into one row by RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT number of columns
                    for(int intCol = 0; intCol < RESIZED_IMAGE_WIDTH; intCol++) {
                        mtxTempReshaped[0, (intRow * RESIZED_IMAGE_WIDTH) + intCol] = mtxTemp[intRow, intCol];
                    }
                }

                // query only the single nearest neighbour; the three nulls discard
                // results/neighborResponses/dist — only the return value is used
                Single sngCurrentChar = kNearest.FindNearest(mtxTempReshaped, 1, null, null, null, null);       // finally we can call find_nearest !!!

                strFinalString = strFinalString + Convert.ToChar(Convert.ToInt32(sngCurrentChar));              // append current char to full string
            }   // end foreach

                        // show the full string
            txtInfo.Text = Environment.NewLine + Environment.NewLine + txtInfo.Text + "number read from image = " + strFinalString + Environment.NewLine;

            CvInvoke.cvShowImage("imgTestingNumbers", imgTestingNumbers);       // show input image with green boxes drawn around found digits
        }
Ejemplo n.º 8
0
        /// <summary>
        /// Three-class KNN demo: builds three Gaussian training clusters,
        /// classifies every pixel of a 500x500 image, colour-codes each pixel
        /// by predicted class and vote confidence, overlays the training
        /// points, and returns the resulting image.
        /// </summary>
        private Image<Bgr, Byte> knn()
        {
            int K = 10;
            int trainSampleCount = 150;
            int sigma = 60;

            #region Generate the training data and classes

            Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
            Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

            Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

            Matrix<float> sample = new Matrix<float>(1, 2);

            // Cluster 1: x ~ N(100, sigma), y ~ N(300, sigma)
            Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
            trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
            trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

            // Cluster 2: both coordinates ~ N(400, sigma)
            Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

            // Cluster 3: x ~ N(300, sigma), y ~ N(100, sigma)
            Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
            trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

            // Class labels 1, 2, 3 for the three thirds of the sample set.
            Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
            trainClasses1.SetValue(1);
            Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainClasses2.SetValue(2);
            Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainClasses3.SetValue(3);

            #endregion

            Matrix<float> results, neighborResponses;
            results = new Matrix<float>(sample.Rows, 1);
            neighborResponses = new Matrix<float>(sample.Rows, K);
            //dist = new Matrix<float>(sample.Rows, K);

            //using (KNearest knn = new KNearest(trainData, trainClasses, null, false, K)) {
            using (KNearest knn = new KNearest()) {
                // NOTE(review): the 'trained' return value is never checked — a
                // training failure would go unnoticed until FindNearest misbehaves.
                bool trained = knn.Train(trainData, trainClasses, null, false, K, false);

                for (int i = 0; i < img.Height; i++) {
                    for (int j = 0; j < img.Width; j++) {
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;

                        //Matrix<float> nearestNeighbors = new Matrix<float>(K* sample.Rows, sample.Cols);
                        // estimates the response and get the neighbors' labels
                        float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);

                        int accuracy = 0;
                        // compute the number of neighbors representing the majority
                        for (int k = 0; k < K; k++) {
                            if (neighborResponses.Data[0, k] == response)
                                accuracy++;
                        }
                        // highlight the pixel depending on the accuracy (or confidence)
                        //img[i, j] =
                        //response == 1 ?
                        //    (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 60, 0)) :
                        //    (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(60, 90, 0));
                        img[i, j] =
                            response == 1 ? (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 30, 30)) :
                           response == 2 ? (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(30, 90, 30)) :
                            (accuracy > 5 ? new Bgr(0, 0, 90) : new Bgr(30, 30, 90));
                    }
                }
                // NOTE(review): hardcoded absolute path on drive D: — will throw on
                // machines without this directory; consider making it configurable.
                knn.Save(@"D:\Play Data\KNN训练数据");
            }

            // display the original training samples

            for (int i = 0; i < (trainSampleCount / 3); i++) {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
                PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
                img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
            }
            return img;
        }
Ejemplo n.º 9
0
        /// <summary>
        /// Experimental port of the Emgu KNN demo onto data from
        /// this.vectorTable; trains a KNearest model on the table contents and
        /// renders a classification map in an ImageViewer.
        /// </summary>
        private void TestEmgu()
        {
            int K = 10;
            //int trainSampleCount = 100;
            int trainSampleCount = this.vectorTable[0].Length-1;
            int trainSampleColumns = this.vectorTable.Length - 2; //subtract two columns for the post id and IsImage
            int scalingRatio = 10;

            #region Generate the traning data and classes

            // NOTE(review): trainData is trainSampleColumns rows x trainSampleCount
            // columns — rows/columns look swapped relative to the usual
            // "one sample per row" layout; confirm the intended orientation.
            Matrix<float> trainData = new Matrix<float>(trainSampleColumns, trainSampleCount);
            Matrix<float> trainClasses = new Matrix<float>(trainSampleColumns, 1);

            Image<Bgr, Byte> img = new Image<Bgr, byte>(trainSampleCount, trainSampleCount);

            Matrix<float> sample = new Matrix<float>(1, trainSampleCount);

            for (int y = 1; y < this.vectorTable[0].Length - 1; y++) {
                for (int x = 2; x < this.vectorTable.Length - 1; x++) {
                    trainData.Data.SetValue(Int32.Parse(this.vectorTable[x][y])*scalingRatio,x-2,y-1);
                }
            }

            Matrix<float> trainData1 = trainData.GetRows(0, trainSampleColumns >> 1, 1);
            //trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix<float> trainData2 = trainData.GetRows(trainSampleColumns >> 1, trainSampleColumns, 1);
            //trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            // NOTE(review): trainClasses has trainSampleColumns rows, but these
            // GetRows calls use trainSampleCount bounds — out-of-range whenever
            // trainSampleCount > trainSampleColumns; verify which count is meant.
            Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);
            #endregion

            Matrix<float> results, neighborResponses;
            results = new Matrix<float>(sample.Rows, 1);
            neighborResponses = new Matrix<float>(sample.Rows, K);
            //dist = new Matrix<float>(sample.Rows, K);

            KNearest knn = new KNearest(trainData, trainClasses, null, false, K);
            for (int i = 0; i < img.Height; i++) {
                for (int j = 0; j < img.Width; j++) {
                    // NOTE(review): sample has trainSampleCount columns but only the
                    // first two cells are ever set — the remaining features stay 0
                    // for every query; presumably copied from the 2-D demo. Confirm.
                    sample.Data[0, 0] = j;
                    sample.Data[0, 1] = i;

                    //Matrix<float> nearestNeighbors = new Matrix<float>(K* sample.Rows, sample.Cols);
                    // estimates the response and get the neighbors' labels
                    float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);

                    int accuracy = 0;
                    // compute the number of neighbors representing the majority
                    for (int k = 0; k < K; k++) {
                        if (neighborResponses.Data[0, k] == response)
                            accuracy++;
                    }
                    // highlight the pixel depending on the accuracy (or confidence)
                    img[i, j] =
                    response == 1 ?
                        (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 60, 0)) :
                        (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(60, 90, 0));
                }
            }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++) {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
            }

            //Emgu.CV.UI.ImageViewer.Show(img);
            Emgu.CV.UI.ImageViewer imgviewer = new Emgu.CV.UI.ImageViewer(img);
            imgviewer.Show();
        }
Ejemplo n.º 10
0
        /// <summary>
        /// Loads the KNN training data: every grayscale image in the "palm"
        /// folder becomes a flattened 50x50 sample with class 0, every image
        /// in the "fist" folder a sample with class 1, then the KNearest
        /// model and the reusable query buffers are created.
        /// </summary>
        private void init_KNN()
        {
            int BWImageWidth = 50;
            trainData = new Matrix<float>(trainSampleCount, BWImageWidth * BWImageWidth);
            trainClasses = new Matrix<float>(trainSampleCount, 1);

            // The two folders were loaded by duplicated loops in the original;
            // the shared logic now lives in LoadTrainingFolder.
            int sampleIdx = 0;
            sampleIdx = LoadTrainingFolder("C:\\KinectImage\\rightHand\\palm_small", 0.0f, sampleIdx, BWImageWidth);
            sampleIdx = LoadTrainingFolder("C:\\KinectImage\\rightHand\\fist_small", 1.0f, sampleIdx, BWImageWidth);

            testSample = new Matrix<float>(1, BWImageWidth * BWImageWidth);
            results = new Matrix<float>(testSample.Rows, 1);
            neighborResponses = new Matrix<float>(testSample.Rows, K);
            knn = new KNearest(trainData, trainClasses, null, false, K);
        }

        /// <summary>
        /// Reads every image in <paramref name="folder"/>, flattens its pixels
        /// into one row of trainData starting at <paramref name="sampleIdx"/>,
        /// labels each row with <paramref name="classLabel"/>, and returns the
        /// next free sample index.
        /// </summary>
        private int LoadTrainingFolder(string folder, float classLabel, int sampleIdx, int imageWidth)
        {
            foreach (string filePath in Directory.GetFiles(folder))
            {
                Image<Gray, Byte> img = new Image<Gray, Byte>(filePath);

                // One label per sample — hoisted out of the per-pixel loops,
                // where the original rewrote it imageWidth*imageWidth times.
                trainClasses[sampleIdx, 0] = classLabel;

                for (int rowIdx = 0; rowIdx < imageWidth; rowIdx++)
                {
                    for (int colIdx = 0; colIdx < imageWidth; colIdx++)
                    {
                        trainData[sampleIdx, imageWidth * rowIdx + colIdx] = (float)img[rowIdx, colIdx].MCvScalar.v0;
                    }
                }
                sampleIdx++;
            }
            return sampleIdx;
        }
Ejemplo n.º 11
0
        /// <summary>
        /// Classifies each vector in <paramref name="Vectors"/> against the
        /// KNN repository and writes a per-vector distance score into
        /// <paramref name="results"/> (1000 when the K-vote is not a majority).
        /// Always returns true.
        /// </summary>
        public override bool Predict(List<ImageVector> Vectors, out double[] results)
        {
            // NOTE(review): forTesting is hardcoded to true, so the
            // non-testing repository branches below are currently dead.
            Boolean forTesting = true;
            string[] cVectors = new string[Vectors.Count];
            for (int i = 0; i < Vectors.Count; i++)
                cVectors[i] = Classifier.ClassifyVector(Vectors[i]);

            Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
            Matrix<float> sample;
            Matrix<float> res, neighborResponses;
            Matrix<float> kNearestNeighbors = new Matrix<float>(_K,cVectors.Length); ;
            results = new double[cVectors.Length];
            res = new Matrix<float>(1,1);
            neighborResponses = new Matrix<float>(1, _K);
            //dist = new Matrix<float>(1, _K);

                //load knn repository
             if (forTesting)
                 _testingRepository.loadList();

             else
                _repository.loadList();

            //calculate proportion of false vs. true in repository
            int f,t;
            if (forTesting)
            {
                f = _testingRepository.VectorListFalse.Count;
                t = _testingRepository.VectorListTrue.Count;
            }
            else
            {
                f = _repository.VectorListFalse.Count;
                t = _repository.VectorListTrue.Count;
            }
                // NOTE(review): t / f is INTEGER division — the fraction is
                // truncated before the assignment; cast to double if a real
                // ratio is intended (the value is currently unused anyway).
                double proportion = t / f; //TODO:use this

            string[] cVectorsTrue, cVectorsFalse;
            if(forTesting)
                Classifier.Classify(_testingRepository.VectorListTrue, _testingRepository.VectorListFalse, out cVectorsTrue, out cVectorsFalse);
            else
                Classifier.Classify(_repository.VectorListTrue, _repository.VectorListFalse, out cVectorsTrue, out cVectorsFalse);

                // Push vectors to algorithm

                convertDataVectorsToMatrix(cVectorsTrue, cVectorsFalse, out _data, out _response);
                using (_knn = new KNearest(_data, _response, null, false, _K))
                {
                    //   }
                    int tr=0;

                    for (int i = 0; i < cVectors.Length; i++)
                    {
                        // Convert vector i to matrix
                        convertSampleToMatrix(cVectors[i], out sample);

                        //Matrix<float> nearestNeighbors = new Matrix<float>(K* sample.Rows, sample.Cols);

                        // estimates the response and get the neighbors' labels
                        try
                        {
                            float response = _knn.FindNearest(sample, _K, res, null, neighborResponses, null);
                            if (response == 1)
                            {
                                //System.Windows.Forms.MessageBox.Show("distance " + dist.ToString());
                                tr++;

                            }
                            double accuracy = 0;//grade of picture
                            double distance = 0;
                            //double power = 0;
                            //double good = 0;
                            // compute the number of neighbors representing the majority

                            // NOTE(review): place is declared inside this loop and is
                            // always 0 when cVectors[place] is read below, so every
                            // iteration measures the distance of cVectors[0];
                            // presumably cVectors[i] was intended — confirm.
                            int place = 0;
                            for (int k = 0; k < _K; k++)
                            {
                                if (neighborResponses.Data[0, k] == response)
                                    accuracy++;
                            }
                            if ((accuracy >= _K / 2))
                            {
                                if (MainForm.weight == true)
                                {
                                    distance = find_weight_distance(cVectors[place], sample, _data);
                                    place++;
                                }
                                else
                                {
                                    distance = find_distance(cVectors[place], sample, _data);
                                    place++;
                                }
                            }
                            else
                            {
                                // No neighbour majority: assign a sentinel "far" distance.
                                distance = 1000;
                                place++;
                            }
                            results[i] = distance;
                            accuracy = 0;

                        }
                        catch (Exception e)
                        {
                            System.Windows.Forms.MessageBox.Show(e.Message);
                        }
                    }
                    //System.Windows.Forms.MessageBox.Show(tr+" true images found");
                }

            return true;
        }