/// <summary>
/// Builds a synthetic 3-class data set (Gaussian clusters), trains a K-Nearest-Neighbours
/// classifier on it, colours every pixel of a 500x500 canvas by its predicted class
/// (saturated where the neighbour vote is strong, washed-out where it is weak), overlays
/// the training samples and returns the visualisation.
/// </summary>
/// <returns>A 500x500 BGR image showing the decision regions and the training points.</returns>
private Image<Bgr, Byte> knn()
{
    int K = 10;                  // neighbours consulted per query
    int trainSampleCount = 150;  // 50 samples per class
    int sigma = 60;              // std-dev of each Gaussian cluster

    #region Generate the training data and classes
    Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
    Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);
    Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
    Matrix<float> sample = new Matrix<float>(1, 2);

    // Class 1: cluster around (100, 300). GetRows/GetCols return views, so the
    // SetRandNormal calls fill trainData in place.
    Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
    trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
    trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

    // Class 2: cluster around (400, 400).
    Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
    trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

    // Class 3: cluster around (300, 100).
    Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
    trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
    trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

    // Labels 1..3, one contiguous block per class.
    Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
    trainClasses1.SetValue(1);
    Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
    trainClasses2.SetValue(2);
    Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
    trainClasses3.SetValue(3);
    #endregion

    Matrix<float> results = new Matrix<float>(sample.Rows, 1);
    Matrix<float> neighborResponses = new Matrix<float>(sample.Rows, K);

    using (KNearest knn = new KNearest())
    {
        // Bail out early if training fails instead of querying an untrained model
        // (the original ignored Train's return value).
        if (!knn.Train(trainData, trainClasses, null, false, K, false))
        {
            return img;
        }

        for (int i = 0; i < img.Height; i++)
        {
            for (int j = 0; j < img.Width; j++)
            {
                sample.Data[0, 0] = j;
                sample.Data[0, 1] = i;

                // Estimate the response and get the neighbours' labels.
                float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);

                // Count how many of the K neighbours agree with the prediction.
                int accuracy = 0;
                for (int k = 0; k < K; k++)
                {
                    if (neighborResponses.Data[0, k] == response)
                    {
                        accuracy++;
                    }
                }

                // Highlight the pixel depending on the accuracy (or confidence):
                // class 1 -> blue-ish, class 2 -> green-ish, class 3 -> red-ish.
                img[i, j] =
                    response == 1 ? (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 30, 30)) :
                    response == 2 ? (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(30, 90, 30)) :
                                    (accuracy > 5 ? new Bgr(0, 0, 90) : new Bgr(30, 30, 90));
            }
        }

        knn.Save(@"D:\Play Data\KNN训练数据");
    }

    // Display the original training samples on top of the decision regions.
    for (int i = 0; i < (trainSampleCount / 3); i++)
    {
        PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
        img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
        PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
        img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
        PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
        img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
    }

    return img;
}
/// <summary>
/// Smoke-tests the Emgu CV KNearest classifier: generates two Gaussian clusters
/// (classes 1 and 2), classifies every pixel of a 500x500 canvas, shades it by the
/// confidence of the K-neighbour vote, and draws the training points on top.
/// All Emgu matrices/images wrap native memory, so they are disposed deterministically
/// (the original leaked them).
/// </summary>
public void TestKNearest()
{
    int K = 10;                 // neighbours consulted per query
    int trainSampleCount = 100; // 50 samples per class

    using (Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2))
    using (Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1))
    using (Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500))
    using (Matrix<float> sample = new Matrix<float>(1, 2))
    using (Matrix<float> results = new Matrix<float>(1, 1))
    using (Matrix<float> neighborResponses = new Matrix<float>(1, K))
    {
        // First half of the rows: class 1, Gaussian cluster around (200, 200).
        // GetRows returns a view, so SetRandNormal fills trainData in place.
        Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
        trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
        // Second half: class 2, cluster around (300, 300).
        Matrix<float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
        trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

        // Matching labels: 1 for the first half, 2 for the second.
        Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
        trainClasses1.SetValue(1);
        Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
        trainClasses2.SetValue(2);

        using (KNearest knn = new KNearest(trainData, trainClasses, null, false, K))
        {
            //TODO: find out when knn.save will be implemented
            //knn.Save("knn.xml");
            for (int i = 0; i < img.Height; i++)
            {
                for (int j = 0; j < img.Width; j++)
                {
                    sample.Data[0, 0] = j;
                    sample.Data[0, 1] = i;

                    // Estimate the response and get the neighbours' labels.
                    float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);

                    // Count the neighbours that voted for the winning class.
                    int accuracy = 0;
                    for (int k = 0; k < K; k++)
                    {
                        if (neighborResponses.Data[0, k] == response)
                        {
                            accuracy++;
                        }
                    }

                    // Highlight the pixel depending on the accuracy (or confidence).
                    img[i, j] = response == 1
                        ? (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 40, 0))
                        : (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(40, 90, 0));
                }
            }
        }

        // Display the original training samples.
        for (int i = 0; i < (trainSampleCount >> 1); i++)
        {
            PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
            img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
            PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
            img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
        }
    }
}
/// <summary>
/// Runs digit OCR over <paramref name="src"/>: inverse-thresholds the image, finds the
/// contours, classifies each contour's 10x10 crop with the supplied k-NN model (k = 1)
/// and concatenates the predicted class labels into a string.
/// NOTE(review): contours are visited in FindContours/hierarchy order, which is not
/// guaranteed to be left-to-right reading order — confirm against the expected input.
/// Side effect: a red bounding box is drawn on <paramref name="src"/> for every contour.
/// </summary>
/// <param name="kNearest">Trained OpenCvSharp KNearest model (expects 1x100 float samples).</param>
/// <param name="src">Input image; modified in place (bounding boxes are drawn onto it).</param>
/// <returns>The concatenated detected class labels, or null if no contour was found.</returns>
public String DoOCR_ReturnString(KNearest kNearest, Mat src)
{
    String RtnString = null;

    using (var gray = new Mat())
    using (var threshImage = new Mat())
    {
        Cv2.CvtColor(src, gray, ColorConversionCodes.BGRA2GRAY);
        // Binary-inverse threshold so dark glyphs become white blobs for FindContours.
        Cv2.Threshold(gray, threshImage, Thresh, ThresholdMaxVal, ThresholdTypes.BinaryInv);

        OpenCvSharp.Point[][] contours;
        HierarchyIndex[] hierarchyIndexes;
        Cv2.FindContours(
            threshImage,
            out contours,
            out hierarchyIndexes,
            mode: RetrievalModes.CComp,
            method: ContourApproximationModes.ApproxSimple);

        if (contours.Length == 0)
        {
            // Nothing recognisable in the image; callers handle the null result.
            return null;
        }

        // Walk the top level of the contour hierarchy (.Next chains sibling contours).
        var contourIndex = 0;
        while (contourIndex >= 0)
        {
            var contour = contours[contourIndex];
            var boundingRect = Cv2.BoundingRect(contour); // bounding rect for each contour
            Cv2.Rectangle(src,
                new OpenCvSharp.Point(boundingRect.X, boundingRect.Y),
                new OpenCvSharp.Point(boundingRect.X + boundingRect.Width, boundingRect.Y + boundingRect.Height),
                new Scalar(0, 0, 255), 2);

            // Crop the blob, normalise it to the 10x10 float sample the model was trained on,
            // and dispose every intermediate Mat (the original leaked one set per contour).
            using (var roi = new Mat(threshImage, boundingRect))
            using (var resizedImage = new Mat())
            using (var resizedImageFloat = new Mat())
            using (var results = new Mat())
            using (var neighborResponses = new Mat())
            using (var dists = new Mat())
            {
                Cv2.Resize(roi, resizedImage, new OpenCvSharp.Size(10, 10)); // resize to 10X10
                resizedImage.ConvertTo(resizedImageFloat, MatType.CV_32FC1); // convert to float
                using (var result = resizedImageFloat.Reshape(1, 1))         // flatten to a 1x100 row
                {
                    var detectedClass = (int)kNearest.FindNearest(result, 1, results, neighborResponses, dists);
                    RtnString = RtnString + detectedClass.ToString(CultureInfo.InvariantCulture);
                }
            }

            contourIndex = hierarchyIndexes[contourIndex].Next;
        }
    }

    return RtnString;
}
/// <summary>
/// Exercises the Emgu CV KNearest classifier on a synthetic two-class data set:
/// two Gaussian clusters are generated, every pixel of a 500x500 canvas is classified
/// and shaded by the strength of the K-neighbour vote, and the training samples are
/// drawn on top of the resulting decision regions.
/// </summary>
public void TestKNearest()
{
    int K = 10;
    int trainSampleCount = 100;
    int half = trainSampleCount / 2; // samples per class

    #region Generate the training data and classes
    Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
    Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);
    Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
    Matrix<float> sample = new Matrix<float>(1, 2);

    // Class 1 points: Gaussian cluster centred on (200, 200); the GetRows view
    // writes straight into trainData.
    Matrix<float> trainData1 = trainData.GetRows(0, half, 1);
    trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));

    // Class 2 points: Gaussian cluster centred on (300, 300).
    Matrix<float> trainData2 = trainData.GetRows(half, trainSampleCount, 1);
    trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

    // Matching labels: 1 for the first half of the rows, 2 for the second.
    Matrix<float> trainClasses1 = trainClasses.GetRows(0, half, 1);
    trainClasses1.SetValue(1);
    Matrix<float> trainClasses2 = trainClasses.GetRows(half, trainSampleCount, 1);
    trainClasses2.SetValue(2);
    #endregion

    Matrix<float> results = new Matrix<float>(sample.Rows, 1);
    Matrix<float> neighborResponses = new Matrix<float>(sample.Rows, K);

    using (KNearest knn = new KNearest(trainData, trainClasses, null, false, K))
    {
        //TODO: find out when knn.save will be implemented
        //knn.Save("knn.xml");
        for (int y = 0; y < img.Height; y++)
        {
            for (int x = 0; x < img.Width; x++)
            {
                sample.Data[0, 0] = x;
                sample.Data[0, 1] = y;

                // Classify the pixel and fetch the labels of its K nearest neighbours.
                float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);

                // How many of the K neighbours voted for the winning class?
                int agreeing = 0;
                for (int k = 0; k < K; k++)
                {
                    if (neighborResponses.Data[0, k] == response)
                    {
                        agreeing++;
                    }
                }

                // Strong majority -> saturated colour; weak majority -> muddier shade.
                Bgr shade;
                if (response == 1)
                {
                    shade = agreeing > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 40, 0);
                }
                else
                {
                    shade = agreeing > 5 ? new Bgr(0, 90, 0) : new Bgr(40, 90, 0);
                }
                img[y, x] = shade;
            }
        }
    }

    // Overlay the original training samples on the decision regions.
    for (int i = 0; i < half; i++)
    {
        img.Draw(new CircleF(new PointF(trainData1[i, 0], trainData1[i, 1]), 2.0f), new Bgr(255, 100, 100), -1);
        img.Draw(new CircleF(new PointF(trainData2[i, 0], trainData2[i, 1]), 2.0f), new Bgr(100, 255, 100), -1);
    }
}
/// <summary>
/// Loads the image at <paramref name="path"/>, segments it into blobs via inverse
/// thresholding + contour detection, classifies each blob's 10x10 crop with the supplied
/// k-NN model (k = 1), and shows the source, segmented source and labelled-result windows.
/// Writes the labelled image to "dest.jpg" and blocks until a key is pressed (Cv2.WaitKey).
/// </summary>
/// <param name="kNearest">Trained OpenCvSharp KNearest model (expects 1x100 float samples).</param>
/// <param name="path">Path of the image file to read.</param>
/// <exception cref="NotSupportedException">No contour was found in the image.</exception>
public void DoOCR(KNearest kNearest, string path)
{
    var src = Cv2.ImRead(path);
    Cv2.ImShow("Source", src);

    using (var gray = new Mat())
    using (var threshImage = new Mat())
    {
        // NOTE(review): ImRead yields 3-channel BGR by default; OpenCV accepts 3-channel
        // input for BGRA2GRAY as well, but BGR2GRAY would state the intent more clearly.
        Cv2.CvtColor(src, gray, ColorConversionCodes.BGRA2GRAY);
        Cv2.Threshold(gray, threshImage, Thresh, ThresholdMaxVal, ThresholdTypes.BinaryInv); // Threshold to find contour

        Point[][] contours;
        HierarchyIndex[] hierarchyIndexes;
        Cv2.FindContours(
            threshImage,
            out contours,
            out hierarchyIndexes,
            mode: RetrievalModes.CComp,
            method: ContourApproximationModes.ApproxSimple);

        if (contours.Length == 0)
        {
            throw new NotSupportedException("Couldn't find any object in the image.");
        }

        // Black canvas, same size as the source, onto which the recognised labels are stamped.
        var dst = new Mat(src.Rows, src.Cols, MatType.CV_8UC3, Scalar.All(0));

        // Walk the top level of the contour hierarchy (.Next chains sibling contours).
        var contourIndex = 0;
        while (contourIndex >= 0)
        {
            var contour = contours[contourIndex];
            var boundingRect = Cv2.BoundingRect(contour); // Find bounding rect for each contour

            // Outline the blob on the source for the "Segmented Source" window.
            Cv2.Rectangle(src,
                new Point(boundingRect.X, boundingRect.Y),
                new Point(boundingRect.X + boundingRect.Width, boundingRect.Y + boundingRect.Height),
                new Scalar(0, 0, 255), 2);

            // Crop, resize to the 10x10 float sample the model was trained on, classify,
            // and dispose every intermediate Mat (the original leaked one set per contour).
            int detectedClass;
            using (var roi = new Mat(threshImage, boundingRect))
            using (var resizedImage = new Mat())
            using (var resizedImageFloat = new Mat())
            using (var results = new Mat())
            using (var neighborResponses = new Mat())
            using (var dists = new Mat())
            {
                Cv2.Resize(roi, resizedImage, new Size(10, 10));             // resize to 10X10
                resizedImage.ConvertTo(resizedImageFloat, MatType.CV_32FC1); // convert to float
                using (var result = resizedImageFloat.Reshape(1, 1))         // flatten to a 1x100 row
                {
                    detectedClass = (int)kNearest.FindNearest(result, 1, results, neighborResponses, dists);
                }
            }

            // Stamp the predicted label at the blob's bottom-left corner.
            Cv2.PutText(
                dst,
                detectedClass.ToString(CultureInfo.InvariantCulture),
                new Point(boundingRect.X, boundingRect.Y + boundingRect.Height),
                0, 1, new Scalar(0, 255, 0), 2);

            contourIndex = hierarchyIndexes[contourIndex].Next;
        }

        Cv2.ImShow("Segmented Source", src);
        Cv2.ImShow("Detected", dst);
        Cv2.ImWrite("dest.jpg", dst);
        Cv2.WaitKey();
    }
}