Example #1
        private void Test_KNN()
        {
            Matrix <float> MData         = null;
            Matrix <int>   MClass        = null;
            List <int>     Predictions   = new List <int>();
            List <int>     ActualClasses = new List <int>();


            (MData, MClass) = FrameObj.DataToMatrix(TestD);

            for (int data = 0; data < MData.Rows; data++)
            {
                var prediction = KNN.Predict(MData.GetRow(data));
                Predictions.Add((int)prediction);
                ActualClasses.Add(MClass[data, 0]);
            }



            var    ConfMatrix = OB.ComputeConfusionMatrix(ActualClasses.ToArray(), Predictions.ToArray(), Datasets.Count);
            var    metrics    = OB.CalculateMetrics(ConfMatrix, ActualClasses.ToArray(), Predictions.ToArray());
            string results    = $"Test Samples {ActualClasses.Count} \n   Accuracy = {metrics[0] * 100} % \n " +
                                $"Precision = {metrics[1] * 100} % \n  Recall = {metrics[2] * 100} %";

            Precision.Content = results;

            DataTable dataTable = new DataTable();
            var       columns   = OB.CM_Total.GetLength(0);
            var       row       = OB.CM_Total.GetLength(1);

            dataTable.Columns.Add(new DataColumn(""));

            for (var c = 0; c < columns; c++)
            {
                dataTable.Columns.Add(new DataColumn("Class" + c.ToString()));
            }

            for (var r = 0; r < row; r++)
            {
                var newRow = dataTable.NewRow();
                newRow[0] = "Class" + r;
                for (var c = 0; c < columns; c++)
                {
                    newRow[c + 1] = OB.CM_Total[r, c];
                }
                dataTable.Rows.Add(newRow);
            }


            confMat.CMat.ItemsSource = dataTable.DefaultView;

            foreach (var col in confMat.CMat.Columns)
            {
                col.Width = 100;
            }

            confMat.CMat.RowHeight = confMat.CMat.Height / (ConfMatrix.GetLength(1) + 2);
        }
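The OB.ComputeConfusionMatrix and OB.CalculateMetrics helpers used above are not shown. As a rough, assumed sketch (not the project's actual implementation), accuracy plus macro-averaged precision and recall could be derived from a square confusion matrix like this:

        // Hypothetical sketch: accuracy and macro-averaged precision/recall from a
        // square confusion matrix (rows assumed to be actual, columns predicted).
        static float[] ComputeBasicMetrics(int[,] cm)
        {
            int classes = cm.GetLength(0);
            float total = 0, correct = 0, precisionSum = 0, recallSum = 0;

            for (int r = 0; r < classes; r++)
            {
                for (int c = 0; c < classes; c++)
                {
                    total += cm[r, c];
                }
                correct += cm[r, r];
            }

            for (int k = 0; k < classes; k++)
            {
                float tp = cm[k, k], predicted = 0, actual = 0;
                for (int i = 0; i < classes; i++)
                {
                    predicted += cm[i, k]; // column sum: samples predicted as class k
                    actual    += cm[k, i]; // row sum: samples actually of class k
                }
                precisionSum += predicted > 0 ? tp / predicted : 0;
                recallSum    += actual > 0 ? tp / actual : 0;
            }

            // [accuracy, precision, recall], the order displayed by Test_KNN above
            return new[] { correct / total, precisionSum / classes, recallSum / classes };
        }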
Example #2
 static void train_knn(string fn)
 {
     //string fn = @"C:\projects\local\GradeChecker\GradeChecker\bin\Debug\report.json";
     try
     {
         var jss = new System.Web.Script.Serialization.JavaScriptSerializer();
         List <Dictionary <string, object> > datas = jss.Deserialize <List <Dictionary <string, object> > >(System.IO.File.ReadAllText(fn));
         string[]       keys = testMQ.Properties.Resources.keys.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
         Matrix <float> data;
         Matrix <int>   response;
         load_data(datas.ToArray(), keys, out data, out response);
         using (KNearest knn = new KNearest())
         {
             //SVMParams p = new SVMParams();
             //p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
             //p.SVMType = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
             //p.C = 1;
             //p.TermCrit = new MCvTermCriteria(100, 0.00001);
             //TrainData td = new TrainData(data, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, response);
             //bool ok = model.TrainAuto(td, 3);
             bool ok = knn.Train(data, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, response);
             if (ok)
             {
                 knn.Save("knn.xml");
                 Matrix <float> sample;
                 load_test_data(datas.ToArray(), keys, out sample);
                 float r = knn.Predict(sample);
             }
         }
     }
     catch (Exception) { }
 }
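load_data and load_test_data are project helpers that are not shown here. A minimal sketch of what such a loader might do, assuming each record carries one numeric value per key and a label under a hypothetical "grade" field, could look like this:

 // Hypothetical sketch of a load_data-style helper: one row per record, one column
 // per key; the "grade" label field is an assumption, not the project's real schema.
 static void load_data_sketch(Dictionary <string, object>[] records, string[] keys,
                              out Matrix <float> data, out Matrix <int> response)
 {
     data     = new Matrix <float>(records.Length, keys.Length);
     response = new Matrix <int>(records.Length, 1);

     for (int r = 0; r < records.Length; r++)
     {
         for (int c = 0; c < keys.Length; c++)
         {
             object v;
             data[r, c] = records[r].TryGetValue(keys[c], out v) ? Convert.ToSingle(v) : 0f;
         }
         response[r, 0] = Convert.ToInt32(records[r]["grade"]); // assumed label field
     }
 }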
Example #3
        static void test_knn(string fn)
        {
            //string fn = @"C:\projects\local\GradeChecker\GradeChecker\bin\Debug\test.json";

            try
            {
                var jss = new System.Web.Script.Serialization.JavaScriptSerializer();
                List <Dictionary <string, object> > datas = jss.Deserialize <List <Dictionary <string, object> > >(System.IO.File.ReadAllText(fn));
                string[]       keys = testMQ.Properties.Resources.keys.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
                Matrix <float> data;
                Matrix <int>   response;
                load_data(datas.ToArray(), keys, out data, out response);
                using (KNearest model = new KNearest())
                {
                    model.Load("knn.xml");
                    for (int i = 0; i < data.Rows; i++)
                    {
                        float r = model.Predict(data.GetRow(i));
                        Dictionary <string, object> d = datas[i];
                        System.Console.WriteLine($"imei={d["imei"]}, VZW={d["VZW"]}, FD={grade_level[(int)r]}");
                    }
                }
            }
            catch (Exception) { }
        }
Example #4
        static void test_knn_1()
        {
            //Regex r = new Regex(@"^Color Temp: (\d+) K - Lux: (\d+) - R: (\d+) G: (\d+) B: (\d+) Rr: (\d+) Gr: (\d+) Br: (\d+) C: (\d+)\s*$");
            //string[] lines = System.IO.File.ReadAllLines(@"data\test.txt");
            //foreach(string s in lines)
            //{
            //    Match m = r.Match(s);
            //    if (m.Success)
            //    {
            //        if (m.Groups.Count > 9)
            //        {

            //        }
            //    }
            //}
            string s = "knn.xml";

            if (System.IO.File.Exists(s))
            {
                using (KNearest knn = new KNearest())
                {
                    knn.Load(s);
                    //bool ok = knn.Train(data, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, response);
                    Matrix <float> sample;
                    test_data(out sample);
                    float r = knn.Predict(sample);
                }
            }
            else
            {
                Matrix <float> data;
                Matrix <float> response;
                ReadMushroomData(out data, out response);

                //
                using (KNearest knn = new KNearest())
                {
                    knn.DefaultK     = 3;
                    knn.IsClassifier = true;
                    bool ok = knn.Train(data, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, response);
                    if (ok)
                    {
                        knn.Save("knn.xml");
                        //int cols = data.Cols;
                        //Matrix<float> sample = new Matrix<float>(1, cols);
                        Matrix <float> sample;
                        test_data(out sample);
                        float r = knn.Predict(sample);
                    }
                }
            }
        }
Example #5
        public int GetImageSimilarityFromKNN(KNearest knn, Mat sample, Rectangle roi)
        {
            using (Mat sample2 = new Mat(sample, roi)) {
                CvInvoke.CvtColor(sample2, sample2, ColorConversion.Bgr2Gray);
                CvInvoke.Threshold(sample2, sample2, 225, 255, ThresholdType.Binary);
                sample2.ConvertTo(sample2, DepthType.Cv32F);
                //sample2.CopyToBitmap().Save("ReferenceImages/Suits/_.bmp");

                using (Mat sample3 = sample2.Reshape(0, 1)) {
                    return((int)knn.Predict(sample3));                    //knn.FindNearest(sample, K, results, null, neighborResponses, null);
                }
            }
        }
Example #6
        public static string recognizeCharsInPlate(Mat imgThresh, List <PossibleChar> listOfMatchingChars)
        {
            string strChars       = "";
            Mat    imgThreshColor = new Mat();

            listOfMatchingChars.Sort((firstChar, secondChar) => firstChar.boundingRect.X.CompareTo(secondChar.boundingRect.X));

            CvInvoke.CvtColor(imgThresh, imgThreshColor, ColorConversion.Gray2Bgr);

            foreach (PossibleChar currentChar in listOfMatchingChars)
            {
                CvInvoke.Rectangle(imgThreshColor, currentChar.boundingRect, SCALAR_GREEN, 2);

                Mat imgROItoBeCloned = new Mat(imgThresh, currentChar.boundingRect);

                Mat imgROI = imgROItoBeCloned.Clone();

                Mat imgROIResized = new Mat();

                CvInvoke.Resize(imgROI, imgROIResized, new Size(RESIZED_CHAR_IMAGE_WIDTH, RESIZED_CHAR_IMAGE_HEIGHT));

                Matrix <Single> mtxTemp = new Matrix <Single>(imgROIResized.Size);

                Matrix <Single> mtxTempReshaped = new Matrix <Single>(1, RESIZED_CHAR_IMAGE_WIDTH * RESIZED_CHAR_IMAGE_HEIGHT);

                imgROIResized.ConvertTo(mtxTemp, DepthType.Cv32F);

                // flatten mtxTemp row-major into the single-row matrix expected by Predict
                for (int intRow = 0; intRow <= RESIZED_CHAR_IMAGE_HEIGHT - 1; intRow++)
                {
                    for (int intCol = 0; intCol <= RESIZED_CHAR_IMAGE_WIDTH - 1; intCol++)
                    {
                        mtxTempReshaped[0, (intRow * RESIZED_CHAR_IMAGE_WIDTH) + intCol] = mtxTemp[intRow, intCol];
                    }
                }

                Single sngCurrentChar;

                sngCurrentChar = kNearest.Predict(mtxTempReshaped);


                strChars = strChars + (char)(Convert.ToInt32(sngCurrentChar));
            }
            //CvInvoke.Imshow("10", imgThreshColor);

            return(strChars);
        }//end
Example #7
        public override string Evaluate(TaggedImage dataSample)
        {
            float[,] featureVector2D = new float[1, dataSample.FeatureVector.Count];

            List <float> featureVector = dataSample.FeatureVector;

            for (int j = 0; j < featureVector.Count; ++j)
            {
                featureVector2D[0, j] = featureVector[j];
            }

            Matrix <float> featureVectorMatrix = new Matrix <float>(featureVector2D);

            string res = TaggedImage.GetStringFromIndex((int)kNearest.Predict(featureVectorMatrix));

            return(res);
        }
Example #8
        static void test_knn()
        {
            Matrix <float> data;
            Matrix <int>   response;

            ReadColorData(out data, out response);
            using (KNearest knn = new KNearest())
            {
                knn.DefaultK     = 3;
                knn.IsClassifier = true;
                bool ok = knn.Train(data, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, response);
                if (ok)
                {
                    knn.Save("knn.xml");
                    //int cols = data.Cols;
                    //Matrix<float> sample = new Matrix<float>(1, cols);
                    Matrix <float> sample;
                    test_data(out sample);
                    float r = knn.Predict(sample);
                }
            }
        }
Example #9
        public void TestKNearest()
        {
            int K = 10;
            int trainSampleCount = 100;

            #region Generate the training data and classes

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample = new Matrix <float>(1, 2);

            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
            trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);

            #endregion

            Matrix <float> results, neighborResponses;
            results           = new Matrix <float>(sample.Rows, 1);
            neighborResponses = new Matrix <float>(sample.Rows, K);
            //dist = new Matrix<float>(sample.Rows, K);

            using (KNearest knn = new KNearest())
            {
                knn.DefaultK     = K;
                knn.IsClassifier = true;
                knn.Train(trainData, MlEnum.DataLayoutType.RowSample, trainClasses);
                //ParamDef[] defs =  knn.GetParams();


                for (int i = 0; i < img.Height; i++)
                {
                    for (int j = 0; j < img.Width; j++)
                    {
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;

                        // estimate the response and get the neighbors' labels
                        float response = knn.Predict(sample);
                        //knn.FindNearest(sample, K, results, null, neighborResponses, null);
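                        // Note: Predict alone does not populate neighborResponses; the accuracy
                        // count below only becomes meaningful if FindNearest (commented out above)
                        // is used to fill it.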

                        int accuracy = 0;
                        // compute the number of neighbors representing the majority
                        for (int k = 0; k < K; k++)
                        {
                            if (neighborResponses.Data[0, k] == response)
                            {
                                accuracy++;
                            }
                        }
                        // highlight the pixel depending on the accuracy (or confidence)
                        img[i, j] =
                            response == 1
                                ? (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 40, 0))
                                : (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(40, 90, 0));
                    }
                }

                String knnModelStr;
                //save stat model to string
                using (FileStorage fs = new FileStorage(".yml", FileStorage.Mode.Write | FileStorage.Mode.Memory))
                {
                    knn.Write(fs, "knn");
                    knnModelStr = fs.ReleaseAndGetString();
                }

                KNearest knn2 = new KNearest();
                knn2.LoadFromString(knnModelStr, "knn");

                String   knnModelStr2 = knn.SaveToString();
                KNearest knn3         = new KNearest();
                knn3.LoadFromString(knnModelStr2);

#if !NETFX_CORE
                String fileName = "knnModel.xml";
                knn.Save(fileName);
                String text = File.ReadAllText(fileName);
#endif
            }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
            }

            //Emgu.CV.UI.ImageViewer.Show(img);
        }
Example #10
        public static KeyValuePair <string, bool> RecognizeText(Mat imgInput)
        {
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            List <ContoursWithData> listOfContoursWithData = new List <ContoursWithData>();          // declare a list of contours with data

            CvInvoke.FindContours(imgInput, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);

            //imgInput.Save(@"D:\Visual Studio Projects\KNNtrain\Imagini\4\" + i + ".jpg");
            //i++;

            // populate list of contours with data
            for (int i = 0; i <= contours.Size - 1; i++)                   // for each contour
            {
                if ((CvInvoke.ContourArea(contours[i]) > MIN_CONTOUR_AREA))
                {
                    ContoursWithData contourWithData = new ContoursWithData();                               // declare new contour with data
                    contourWithData.contour           = contours[i];                                         // populate contour member variable
                    contourWithData.boundingRectangle = CvInvoke.BoundingRectangle(contourWithData.contour); // calculate bounding rectangle
                    contourWithData.dblArea           = CvInvoke.ContourArea(contourWithData.contour);       // calculate area

                    if (contourWithData.checkIfContourIsValid())
                    {
                        listOfContoursWithData.Add(contourWithData);// add to list of contours with data
                    }
                    else
                    {
                        if (contourWithData.boundingRectangle.Width > Properties.Settings.Default.boundingRectangleWidthMax)
                        {
                            Mat imgROItoBeCloned = new Mat(imgInput, contourWithData.boundingRectangle);

                            Mat imgROI = imgROItoBeCloned.Clone();

                            Rectangle rectangleFirst  = new Rectangle(0, 0, contourWithData.boundingRectangle.Width / 2, contourWithData.boundingRectangle.Height);
                            Rectangle rectangleSecond = new Rectangle(contourWithData.boundingRectangle.Width / 2, 0, contourWithData.boundingRectangle.Width / 2, contourWithData.boundingRectangle.Height);

                            Mat firstImage  = new Mat(imgROI, rectangleFirst);
                            Mat secondImage = new Mat(imgROI, rectangleSecond);

                            VectorOfVectorOfPoint contoursSplitImage = new VectorOfVectorOfPoint();

                            CvInvoke.FindContours(firstImage, contoursSplitImage, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);

                            for (int j = 0; j <= contoursSplitImage.Size - 1; j++)                   // for each contour
                            {
                                if ((CvInvoke.ContourArea(contoursSplitImage[j]) > MIN_CONTOUR_AREA))
                                {
                                    ContoursWithData contourWithDataFirstImage = new ContoursWithData();                                           // declare new contour with data
                                    contourWithDataFirstImage.contour             = contoursSplitImage[j];                                         // populate contour member variable
                                    contourWithDataFirstImage.boundingRectangle   = CvInvoke.BoundingRectangle(contourWithDataFirstImage.contour); // calculate bounding rectangle
                                    contourWithDataFirstImage.boundingRectangle.X = contourWithData.boundingRectangle.X;
                                    contourWithDataFirstImage.boundingRectangle.Y = contourWithData.boundingRectangle.Y;
                                    contourWithDataFirstImage.dblArea             = CvInvoke.ContourArea(contourWithDataFirstImage.contour);     // calculate area

                                    if (contourWithDataFirstImage.checkIfContourIsValid())
                                    {
                                        listOfContoursWithData.Add(contourWithDataFirstImage);// add to list of contours with data
                                    }
                                }
                            }

                            contoursSplitImage = new VectorOfVectorOfPoint();

                            CvInvoke.FindContours(secondImage, contoursSplitImage, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);

                            for (int j = 0; j <= contoursSplitImage.Size - 1; j++)                   // for each contour
                            {
                                if ((CvInvoke.ContourArea(contoursSplitImage[j]) > MIN_CONTOUR_AREA))
                                {
                                    ContoursWithData contourWithDataSecondImage = new ContoursWithData();                                            // declare new contour with data
                                    contourWithDataSecondImage.contour             = contoursSplitImage[j];                                          // populate contour member variable
                                    contourWithDataSecondImage.boundingRectangle   = CvInvoke.BoundingRectangle(contourWithDataSecondImage.contour); // calculate bounding rectangle
                                    contourWithDataSecondImage.boundingRectangle.X = contourWithData.boundingRectangle.X + contourWithData.boundingRectangle.Width / 2;
                                    contourWithDataSecondImage.boundingRectangle.Y = contourWithData.boundingRectangle.Y;
                                    contourWithDataSecondImage.dblArea             = CvInvoke.ContourArea(contourWithDataSecondImage.contour);     // calculate area

                                    if (contourWithDataSecondImage.checkIfContourIsValid())
                                    {
                                        listOfContoursWithData.Add(contourWithDataSecondImage);// add to list of contours with data
                                    }
                                }
                            }
                        }
                    }
                }
            }

            float averageLocationY = 0;
            float sumLocationY     = 0;

            foreach (var item in listOfContoursWithData)
            {
                sumLocationY += item.boundingRectangle.Y + item.boundingRectangle.Height / 2;
            }

            averageLocationY = sumLocationY / listOfContoursWithData.Count;

            float minLocationY = averageLocationY - 0.15f * averageLocationY;
            float maxLocationY = averageLocationY + 0.15f * averageLocationY;

            listOfContoursWithData.RemoveAll(x => minLocationY > x.boundingRectangle.Y + x.boundingRectangle.Height / 2 || maxLocationY < x.boundingRectangle.Y + x.boundingRectangle.Height / 2);

            // sort contours with data from left to right
            listOfContoursWithData.Sort((oneContourWithData, otherContourWithData) => oneContourWithData.boundingRectangle.X.CompareTo(otherContourWithData.boundingRectangle.X));

            string strFinalString = "";                                                                           // declare final string, this will have the final number sequence by the end of the program

            foreach (ContoursWithData contourWithData in listOfContoursWithData)                                  // for each contour in list of valid contours
            {
                CvInvoke.Rectangle(imgInput, contourWithData.boundingRectangle, new MCvScalar(200, 0.0, 0.0), 2); // draw a rectangle around the current char (BGR 200,0,0)

                Mat imgROItoBeCloned = new Mat(imgInput, contourWithData.boundingRectangle);                      // get ROI image of bounding rect

                Mat imgROI = imgROItoBeCloned.Clone();                                                            // clone ROI image so we don't change original when we resize

                Mat imgROIResized = new Mat();

                // resize image, this is necessary for char recognition
                CvInvoke.Resize(imgROI, imgROIResized, new System.Drawing.Size(RESIZED_IMAGE_WIDTH, RESIZED_IMAGE_HEIGHT));

                // declare a Matrix of the same dimensions as the Image we are adding to the data structure of training images
                Matrix <float> mtxTemp = new Matrix <float>(imgROIResized.Size);

                // declare a flattened (only 1 row) matrix of the same total size
                Matrix <float> mtxTempReshaped = new Matrix <float>(1, RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT);

                imgROIResized.ConvertTo(mtxTemp, DepthType.Cv32F);                 // convert Image to a Matrix of Singles with the same dimensions

                for (int intRow = 0; intRow <= RESIZED_IMAGE_HEIGHT - 1; intRow++) // flatten Matrix into one row by RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT number of columns
                {
                    for (int intCol = 0; intCol <= RESIZED_IMAGE_WIDTH - 1; intCol++)
                    {
                        mtxTempReshaped[0, (intRow * RESIZED_IMAGE_WIDTH) + intCol] = mtxTemp[intRow, intCol];
                    }
                }

                float sngCurrentChar;

                sngCurrentChar = kNearest.Predict(mtxTempReshaped);                        // finally we can call Predict !!!

                strFinalString = strFinalString + (char)(Convert.ToInt32(sngCurrentChar)); // append current char to full string of chars
            }

            bool licensePlateRegex = LicensePlateRegex.MatchRegex(strFinalString);

            return(new KeyValuePair <string, bool>(strFinalString, licensePlateRegex));
        }
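A minimal call site for RecognizeText, assuming imgThresh is an already-thresholded single-channel plate image (a sketch, not code from this project):

        // Hypothetical usage: imgThresh is a binarised plate image.
        KeyValuePair <string, bool> result = RecognizeText(imgThresh);
        if (result.Value) // true when the recognized string matches the plate regex
        {
            Console.WriteLine("Plate: " + result.Key);
        }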
Example #11
        ///''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
        //this is where we apply the actual char recognition
        public string recognizeCharsInPlate(Mat imgThresh, List <PossibleChar> listOfMatchingChars)
        {
            string strChars = "";
            //this will be the return value, the chars in the lic plate

            Mat imgThreshColor = new Mat();

            listOfMatchingChars.Sort((oneChar, otherChar) => oneChar.boundingRect.X.CompareTo(otherChar.boundingRect.X));
            //sort chars from left to right

            CvInvoke.CvtColor(imgThresh, imgThreshColor, ColorConversion.Gray2Bgr);

            //for each char in plate
            foreach (PossibleChar currentChar in listOfMatchingChars)
            {
                CvInvoke.Rectangle(imgThreshColor, currentChar.boundingRect, SCALAR_GREEN, 2);
                //draw green box around the char

                Mat imgROItoBeCloned = new Mat(imgThresh, currentChar.boundingRect);
                //get ROI image of bounding rect

                Mat imgROI = imgROItoBeCloned.Clone();
                //clone ROI image so we don't change original when we resize

                Mat imgROIResized = new Mat();

                //resize image, this is necessary for char recognition
                CvInvoke.Resize(imgROI, imgROIResized, new Size(RESIZED_CHAR_IMAGE_WIDTH, RESIZED_CHAR_IMAGE_HEIGHT));

                //declare a Matrix of the same dimensions as the Image we are adding to the data structure of training images
                Matrix <float> mtxTemp = new Matrix <float>(imgROIResized.Size);

                //declare a flattened (only 1 row) matrix of the same total size
                Matrix <float> mtxTempReshaped = new Matrix <float>(1, RESIZED_CHAR_IMAGE_WIDTH * RESIZED_CHAR_IMAGE_HEIGHT);

                imgROIResized.ConvertTo(mtxTemp, DepthType.Cv32F);
                //convert Image to a Matrix of Singles with the same dimensions

                //flatten Matrix into one row by RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT number of columns
                for (int intRow = 0; intRow <= RESIZED_CHAR_IMAGE_HEIGHT - 1; intRow++)
                {
                    for (int intCol = 0; intCol <= RESIZED_CHAR_IMAGE_WIDTH - 1; intCol++)
                    {
                        mtxTempReshaped[0, (intRow * RESIZED_CHAR_IMAGE_WIDTH) + intCol] = mtxTemp[intRow, intCol];
                    }
                }

                float sngCurrentChar = 0;

                sngCurrentChar = kNearest.Predict(mtxTempReshaped);
                //finally we can call Predict !!!

                strChars = strChars + (char)sngCurrentChar;
                //append current char to full string of chars
            }

            // show steps '''''''''''''''''''''''''''''''''
            if (frm.cbShowSteps.Checked)
            {
                CvInvoke.Imshow("10", imgThreshColor);
            }
            // show steps '''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''

            return(strChars);
            //return result
        }
Example #12
        static void check_device_color(System.Collections.Specialized.StringDictionary args)
        {
            System.Console.WriteLine($"Open COM port: {args["port"]}.");
            //1.open serial port
            try
            {
                _port = new System.IO.Ports.SerialPort(args["port"]);
                //_port = new SerialPort(args["port"], 9600);
                _port.BaudRate      = 9600;
                _port.Parity        = Parity.None;
                _port.StopBits      = StopBits.One;
                _port.DataBits      = 8;
                _port.Handshake     = Handshake.None;
                _port.RtsEnable     = true;
                _port.DtrEnable     = true;
                _port.ReadTimeout   = 1000;
                _port.WriteTimeout  = 1000;
                _port.DataReceived += _port_DataReceived;
                _port.Open();
            }
            catch (Exception)
            {
                _port = null;
                System.Console.WriteLine($"Fail to open COM port: {args["port"]}.");
                goto exit;
            }

            DateTime _start = DateTime.Now;
            bool     done   = false;

            System.Console.WriteLine($"Waiting for sensor ready.");
            //2.wait for sensor ready
            while (!done)
            {
                string s = get_data();
                Match  m = Regex.Match(s, "Found sensor", RegexOptions.None, Regex.InfiniteMatchTimeout);
                if (m.Success)
                {
                    System.Console.WriteLine($"Sensor is ready.");
                    done = true;
                }
                if ((DateTime.Now - _start).TotalSeconds > 10)
                {
                    break;
                }
            }
            if (!done)
            {
                System.Console.WriteLine($"Sensor is not ready.");
                goto exit;
            }

            Regex r = new Regex(@"^Color Temp: (\d+) K - Lux: (\d+) - R: (\d+) G: (\d+) B: (\d+) C: (\d+)\s*$");

            //3.turn off led
            System.Console.WriteLine($"Trun off LED.");
            _port.Write(new byte[] { 0x00 }, 0, 1);

            System.Console.WriteLine($"Read data for white noise.");
            System.Console.WriteLine($"Please remove device from sensor, and press any key to continue and q to quit.");
            ConsoleKeyInfo k = System.Console.ReadKey();

            if (k.KeyChar == 'q' || k.KeyChar == 'Q')
            {
                goto exit;
            }
            //4.read data for white noise
            int samples = 10;

            int[,] white_noise = new int[samples, 6];
            System.Console.WriteLine($"Read {samples} sample data for white noise.");
            done = false;
            int i = 0;

            int[] white_noise_lux = new int[samples];
            int[] white_noise_c   = new int[samples];
            while (!done && i < samples)
            {
                System.Threading.Thread.Sleep(1000);
                string s = get_data();
                System.Console.WriteLine($"White noise data: {s}");
                Match m = r.Match(s);
                if (m.Success && m.Groups.Count > 6)
                {
                    white_noise[i, 0]  = Int32.Parse(m.Groups[1].Value);
                    white_noise[i, 1]  = Int32.Parse(m.Groups[2].Value);
                    white_noise[i, 2]  = Int32.Parse(m.Groups[3].Value);
                    white_noise[i, 3]  = Int32.Parse(m.Groups[4].Value);
                    white_noise[i, 4]  = Int32.Parse(m.Groups[5].Value);
                    white_noise[i, 5]  = Int32.Parse(m.Groups[6].Value);
                    white_noise_lux[i] = white_noise[i, 1];
                    white_noise_c[i]   = white_noise[i, 5];
                    i++;
                }
            }
            System.Console.WriteLine($"Complete to sample data for white noise.");
            // MeanStandardDeviation
            Tuple <double, double> wn_lux = MathNet.Numerics.Statistics.ArrayStatistics.MeanStandardDeviation(white_noise_lux);
            Tuple <double, double> wn_c   = MathNet.Numerics.Statistics.ArrayStatistics.MeanStandardDeviation(white_noise_c);

            System.Console.WriteLine($"White noise. mean of lux={wn_lux.Item1}, stddev={wn_lux.Item2}");
            System.Console.WriteLine($"White noise. mean of C={wn_c.Item1}, stddev={wn_c.Item2}");

            // load existing data for knn
            System.Console.WriteLine("Load training data.");
            KNearest knn = new KNearest();

            done = false;
            {
                Matrix <float> trained_data;
                Matrix <int>   response;
                ReadColorData(out trained_data, out response);
                //using (KNearest knn = new KNearest())
                {
                    knn.DefaultK     = 3;
                    knn.IsClassifier = true;
                    bool ok = knn.Train(trained_data, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, response);
                    if (ok)
                    {
                        System.Console.WriteLine("Load training data Success.");
                        done = true;
                        //knn.Save("knn.xml");
                        //int cols = data.Cols;
                        //Matrix<float> sample = new Matrix<float>(1, cols);
                        //Matrix<float> sample;
                        //test_data(out sample);
                        //float r = knn.Predict(sample);
                    }
                }
            }
            if (!done)
            {
                System.Console.WriteLine("Fail to load training data .");
                goto exit;
            }

            string data = "";

            done = false;
            System.Console.WriteLine($"Check device color. please place devices, press q to quit.");
            //List<int[]> color_data = new List<int[]>();
            int device_stage = 0;

            while (!done)
            {
                System.Threading.Thread.Sleep(1000);
                if (System.Console.KeyAvailable)
                {
                    k = System.Console.ReadKey();
                    if (k.KeyChar == 'q' || k.KeyChar == 'Q')
                    {
                        done = true;
                        continue;
                    }
                }
                //string data;
                data = get_data();
                Match m = r.Match(data);
                if (m.Success)
                {
                    //System.Console.WriteLine($"Data: {data}");
                    if (m.Groups.Count > 6)
                    {
                        if (device_stage == 0)
                        {
                            // wait for the device to be in place,
                            // get lux and c
                            int    lux = Int32.Parse(m.Groups[2].Value);
                            int    c   = Int32.Parse(m.Groups[6].Value);
                            double r1  = (wn_lux.Item1 - lux) / wn_lux.Item1;
                            double r2  = (wn_c.Item1 - c) / wn_c.Item1;
                            if (r1 > 0.5 && r2 > 0.5)
                            {
                                // device in place
                                System.Console.WriteLine($"Device In-Place.");
                                device_stage = 1;
                            }
                        }
                        else if (device_stage == 1)
                        {
                            // device in-place
                            // led on.
                            System.Console.WriteLine($"Turn On LED .");
                            _port.Write(new byte[] { 0xff }, 0, 1);
                            device_stage = 2;
                            System.Threading.Thread.Sleep(2000);
                        }
                        else if (device_stage == 2)
                        {
                            System.Console.WriteLine($"Color Data: {data}");
                            // save color data
                            //int[] c = new int[6];
                            //c[0] = Int32.Parse(m.Groups[1].Value);
                            //c[1] = Int32.Parse(m.Groups[2].Value);
                            //c[2] = Int32.Parse(m.Groups[3].Value);
                            //c[3] = Int32.Parse(m.Groups[4].Value);
                            //c[4] = Int32.Parse(m.Groups[5].Value);
                            //c[5] = Int32.Parse(m.Groups[6].Value);
                            //color_data.Add(c);
                            // predict device color
                            string         s = parse_color_data(data);
                            Matrix <float> sample;
                            test_data(out sample, s);
                            float idx = knn.Predict(sample);
                            System.Console.WriteLine($"Predict: device color idx is {idx}");
                            device_stage = 3;
                        }
                        else if (device_stage == 3)
                        {
                            // reading done, turn the LED back off
                            System.Console.WriteLine($"Turn Off LED.");
                            _port.Write(new byte[] { 0x00 }, 0, 1);
                            device_stage = 4;
                            System.Threading.Thread.Sleep(1000);
                        }
                        else if (device_stage == 4)
                        {
                            System.Console.WriteLine($"Please remove device and place another one.");
                            // wait for the device to be removed,
                            // get lux and c
                            int    lux = Int32.Parse(m.Groups[2].Value);
                            int    c   = Int32.Parse(m.Groups[6].Value);
                            double r1  = (wn_lux.Item1 - lux) / wn_lux.Item1;
                            double r2  = (wn_c.Item1 - c) / wn_c.Item1;
                            if (r1 < 0.2 && r2 < 0.2)
                            {
                                // device removed
                                System.Console.WriteLine($"Device removed.");
                                device_stage = 0;
                            }
                        }
                        else
                        {
                        }
                    }
                }
            }
            //5.press any key to continue to read device color
            //6.place device
            //7.wait for device in-place
            //8.read data for device color
            //9.wait for device removal
            //10.press 'q' to quit or go to 7.
            //11.done.
exit:
            if (_port != null)
            {
                if (_port.IsOpen)
                {
                    _port.Write(new byte[] { 0x00 }, 0, 1);
                    _port.Close();
                }
            }
        }
Example #13
 /// Predict gesture from sample
 public static Gestures Predict(Matrix <float> sample)
 {
     return((Gestures)knn.Predict(sample));
 }
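The wrapper assumes a knn field already trained on gesture feature rows. A caller could assemble the one-row sample like this; ExtractFeatures and the feature length are hypothetical:

 // Hypothetical call: wrap a feature vector into a 1xN Matrix<float> and classify it.
 float[] features = ExtractFeatures(); // assumed helper returning the gesture features
 Matrix <float> sample = new Matrix <float>(1, features.Length);
 for (int i = 0; i < features.Length; i++)
 {
     sample[0, i] = features[i];
 }
 Gestures g = Predict(sample);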
Example #14
        public void TestKNearest()
        {
            int K = 10;
            int trainSampleCount = 100;

            #region Generate the training data and classes

            Matrix <float> trainData    = new Matrix <float>(trainSampleCount, 2);
            Matrix <float> trainClasses = new Matrix <float>(trainSampleCount, 1);

            Image <Bgr, Byte> img = new Image <Bgr, byte>(500, 500);

            Matrix <float> sample = new Matrix <float>(1, 2);

            Matrix <float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
            trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix <float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            Matrix <float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix <float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);
            #endregion

            Matrix <float> results, neighborResponses;
            results           = new Matrix <float>(sample.Rows, 1);
            neighborResponses = new Matrix <float>(sample.Rows, K);
            //dist = new Matrix<float>(sample.Rows, K);

            using (KNearest knn = new KNearest())
            {
                knn.DefaultK     = K;
                knn.IsClassifier = true;
                knn.Train(trainData, MlEnum.DataLayoutType.RowSample, trainClasses);
                //ParamDef[] defs =  knn.GetParams();
                //TODO: find out when knn.save will be implemented
                //knn.Save("knn.xml");

                for (int i = 0; i < img.Height; i++)
                {
                    for (int j = 0; j < img.Width; j++)
                    {
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;

                        // estimate the response and get the neighbors' labels
                        float response = knn.Predict(sample); //knn.FindNearest(sample, K, results, null, neighborResponses, null);
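                        // Note: Predict alone does not populate neighborResponses; the accuracy
                        // count below only becomes meaningful if FindNearest (commented out above)
                        // is used to fill it.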

                        int accuracy = 0;
                        // compute the number of neighbors representing the majority
                        for (int k = 0; k < K; k++)
                        {
                            if (neighborResponses.Data[0, k] == response)
                            {
                                accuracy++;
                            }
                        }
                        // highlight the pixel depending on the accuracy (or confidence)
                        img[i, j] =
                            response == 1 ?
                            (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 40, 0)) :
                            (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(40, 90, 0));
                    }
                }
            }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++)
            {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
            }

            //Emgu.CV.UI.ImageViewer.Show(img);
        }
Example #15
        //Recognize a single digit.
        public int Recognize(Mat img)
        {
            const int RESIZED_IMAGE_WIDTH  = 10;
            const int RESIZED_IMAGE_HEIGHT = 10;

            int cres = '?';

            // use the previously loaded training samples and their class labels
            var mtxClassifications = _responses;
            var mtxTrainingImages  = _samples;

            int intNumberOfTrainingSamples = mtxClassifications.Rows;

            // train
            KNearest kNearest = new KNearest();

            kNearest.DefaultK = 1;
            kNearest.Train(mtxTrainingImages, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, mtxClassifications);

            Mat imgTestingNumbers = img;

            //declare various images
            Mat imgGrayscale  = new Mat();
            Mat imgBlurred    = new Mat();
            Mat imgThresh     = new Mat();
            Mat imgThreshCopy = new Mat();

            //convert to grayscale
            CvInvoke.CvtColor(imgTestingNumbers, imgGrayscale, ColorConversion.Bgr2Gray);

            //blur
            CvInvoke.GaussianBlur(imgGrayscale, imgBlurred, new Size(5, 5), 0);

            //threshold image from grayscale to black and white
            CvInvoke.AdaptiveThreshold(imgBlurred, imgThresh, 255.0, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, 11, 2.0);

            //make a copy of the thresh image; this is necessary because FindContours modifies the image
            imgThreshCopy = imgThresh.Clone();

            var contours = new VectorOfVectorOfPoint();

            //get external contours only
            CvInvoke.FindContours(imgThreshCopy, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);

            //declare a list of contours with data
            var listOfContoursWithData = new List <ContourWithData>();

            //populate list of contours with data
            //for each contour
            for (int i = 0; i <= contours.Size - 1; i++)
            {
                //declare new contour with data
                ContourWithData contourWithData = new ContourWithData();
                //populate contour member variable
                contourWithData.contour = contours[i];
                //calculate bounding rectangle
                contourWithData.boundingRect = CvInvoke.BoundingRectangle(contourWithData.contour);
                //calculate area
                contourWithData.dblArea = CvInvoke.ContourArea(contourWithData.contour);

                //if contour with data is valid
                if ((contourWithData.CheckIfContourIsValid()))
                {
                    //add to list of contours with data
                    listOfContoursWithData.Add(contourWithData);
                }
            }
            //sort contours with data from left to right
            listOfContoursWithData.Sort((oneContourWithData, otherContourWithData) => oneContourWithData.boundingRect.X.CompareTo(otherContourWithData.boundingRect.X));

            //declare final string, this will have the final number sequence by the end of the program
            string strFinalString = "";

            //for each contour in list of valid contours
            foreach (ContourWithData contourWithData in listOfContoursWithData)
            {
                //draw green rect around the current char
                CvInvoke.Rectangle(imgTestingNumbers, contourWithData.boundingRect, new MCvScalar(0.0, 255.0, 0.0), 2);

                //get ROI image of bounding rect
                Mat imgROItoBeCloned = new Mat(imgThresh, contourWithData.boundingRect);

                //clone ROI image so we don't change original when we resize
                Mat imgROI = imgROItoBeCloned.Clone();

                Mat imgROIResized = new Mat();

                //resize image, this is necessary for char recognition
                CvInvoke.Resize(imgROI, imgROIResized, new Size(RESIZED_IMAGE_WIDTH, RESIZED_IMAGE_HEIGHT));

                //declare a Matrix of the same dimensions as the Image we are adding to the data structure of training images
                Matrix <float> mtxTemp = new Matrix <float>(imgROIResized.Size);

                //declare a flattened (only 1 row) matrix of the same total size
                Matrix <float> mtxTempReshaped = new Matrix <float>(1, RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT);

                //convert Image to a Matrix of Singles with the same dimensions
                imgROIResized.ConvertTo(mtxTemp, DepthType.Cv32F);

                //flatten Matrix into one row by RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT number of columns
                for (int intRow = 0; intRow <= RESIZED_IMAGE_HEIGHT - 1; intRow++)
                {
                    for (int intCol = 0; intCol <= RESIZED_IMAGE_WIDTH - 1; intCol++)
                    {
                        mtxTempReshaped[0, (intRow * RESIZED_IMAGE_WIDTH) + intCol] = mtxTemp[intRow, intCol];
                    }
                }

                float sngCurrentChar = 0;

                //finally we can call Predict !!!
                sngCurrentChar = kNearest.Predict(mtxTempReshaped);

                //append current char to full string of chars
                strFinalString = strFinalString + (char)sngCurrentChar;
            }

            //Console.WriteLine("results: " + results);
            //Console.WriteLine("neighborResponses: " + neighborResponses);
            //Console.WriteLine("dists: " + dists);
            //Console.WriteLine("results: " + results);

            return(cres);
        }