/// <summary>
/// Copies a single-channel height map into the surface-mesh data series and
/// updates the renderable series' colour-mapping range to the map's min/max.
/// </summary>
/// <param name="input">Source image; converted to CV_32FC1 before use.</param>
public void SetData(cv.Mat input)
{
    zMap = new cv.Mat(new cv.Size(input.Width, input.Height), cv.MatType.CV_32FC1);
    input.ConvertTo(zMap, cv.MatType.CV_32FC1);
    Init(zMap.Rows, zMap.Cols);

    // Fill the mesh in parallel. yMax is shared state, so each worker tracks a
    // thread-local maximum and the partial results are merged under a lock —
    // the original code wrote yMax from every thread without synchronization,
    // which is a data race and could record a stale maximum.
    object gate = new object();
    Parallel.For(0, xSize,
        () => float.MinValue,
        (x, state, localMax) =>
        {
            for (int z = 0; z < zSize; ++z)
            {
                float v = zMap.At<float>(x, z);
                MeshDataSeries[z, x] = v;
                if (localMax < v)
                {
                    localMax = v;
                }
            }
            return localMax;
        },
        localMax =>
        {
            lock (gate)
            {
                if (yMax < localMax)
                {
                    yMax = localMax;
                }
            }
        });

    // Colour range follows the global min/max of the height map.
    double min, max;
    zMap.MinMaxLoc(out min, out max);
    surfaceMeshRenderableSeries.Maximum = max;
    surfaceMeshRenderableSeries.Minimum = min;
}
/// <summary>
/// Normalizes <paramref name="input"/> in place to the range [0, 1000] and
/// mirrors the normalized values into MeshDataSeries.
/// </summary>
/// <param name="input">Height map; read and written as 32-bit floats (CV_32FC1) — TODO confirm the mat type at the caller.</param>
public void ApplyMinMaxCalc(cv.Mat input)
{
    // Scan for the extrema. The original code read the elements as <double>
    // here while every other access treats the mat as float — on a CV_32FC1
    // mat that reinterprets two adjacent floats as one double and yields
    // garbage min/max values (and reads past the last element).
    double zMax = double.MinValue;
    double zMin = double.MaxValue;
    for (int x = 0; x < xSize; ++x)
    {
        for (int z = 0; z < zSize; ++z)
        {
            float v = input.At<float>(x, z);
            if (zMax < v) { zMax = v; }
            if (zMin > v) { zMin = v; }
        }
    }

    // Guard against a flat map (zMax == zMin) so we never divide by zero.
    float range = (float)(zMax - zMin);
    if (range <= 0f) { range = 1f; }

    // Shift to zero and scale to [0, 1000], writing back in a single pass
    // (the original wrote each element twice: once shifted, once scaled).
    for (int x = 0; x < xSize; ++x)
    {
        for (int z = 0; z < zSize; ++z)
        {
            float normalized = (input.At<float>(x, z) - (float)zMin) * 1000.0f / range;
            input.Set<float>(x, z, normalized);
            MeshDataSeries[z, x] = normalized;
        }
    }
}
/// <summary>
/// FLANN sample: builds a KD-tree index over 10,000 random 2-D points and
/// runs a 1-nearest-neighbour query against it, printing the result.
/// </summary>
public void Run()
{
    Console.WriteLine("===== FlannTest =====");

    // Random 2-D data set, one point per row.
    using (var features = new Mat(10000, 2, MatType.CV_32FC1))
    {
        var rand = new Random();
        for (int row = 0; row < features.Rows; row++)
        {
            features.Set<float>(row, 0, rand.Next(10000));
            features.Set<float>(row, 1, rand.Next(10000));
        }

        // Single query point, packed into a 1x2 mat.
        var queryPoint = new Point2f(7777, 7777);
        var queries = new Mat(1, 2, MatType.CV_32FC1);
        queries.Set<float>(0, 0, queryPoint.X);
        queries.Set<float>(0, 1, queryPoint.Y);
        Console.WriteLine("query:({0}, {1})", queryPoint.X, queryPoint.Y);
        Console.WriteLine("-----");

        // k-nearest-neighbour search on a 4-tree KD-tree index.
        using (var nnIndex = new Index(features, new KDTreeIndexParams(4)))
        {
            const int Knn = 1;
            int[] indices;
            float[] dists;
            nnIndex.KnnSearch(queries, out indices, out dists, Knn, new SearchParams(32));

            for (int i = 0; i < Knn; i++)
            {
                var matched = new Point2f(
                    features.Get<float>(indices[i], 0),
                    features.Get<float>(indices[i], 1));
                Console.Write("No.{0}\t", i);
                Console.Write("index:{0}", indices[i]);
                Console.Write(" distance:{0}", dists[i]);
                Console.Write(" data:({0}, {1})", matched.X, matched.Y);
                Console.WriteLine();
            }
        }
    }
    Console.Read();
}
/// <summary>
/// Oil-paint filter: for every pixel, buckets the surrounding
/// filterSize x filterSize window into <paramref name="levels"/> intensity
/// bins and writes the average colour of the most populated bin to
/// <paramref name="result1"/>.
/// </summary>
/// <param name="color">Source image, read as 3-channel bytes (Vec3b).</param>
/// <param name="result1">Destination image; a filterOffset-wide border is left untouched.</param>
/// <param name="filterSize">Window size of the filter kernel (expected odd).</param>
/// <param name="levels">Number of intensity quantization levels.</param>
public void Start(cv.Mat color, cv.Mat result1, int filterSize, int levels)
{
    int filterOffset = (filterSize - 1) / 2;

    for (int offsetY = filterOffset; offsetY < color.Height - filterOffset; offsetY++)
    {
        for (int offsetX = filterOffset; offsetX < color.Width - filterOffset; offsetX++)
        {
            int maxIntensity = 0;
            int maxIndex = 0;
            int[] intensityBin = new int[levels];
            cv.Vec3i[] bins = new cv.Vec3i[levels];

            // The original looped with '<', which dropped the bottom/right
            // edge of the kernel and produced an asymmetric
            // (filterSize-1)^2 window; '<=' covers the full centered
            // filterSize x filterSize window.
            for (int y = offsetY - filterOffset; y <= offsetY + filterOffset; y++)
            {
                for (int x = offsetX - filterOffset; x <= offsetX + filterOffset; x++)
                {
                    cv.Vec3b rgb = color.Get<cv.Vec3b>(y, x);
                    // NOTE(review): the rounding-before-/255 placement is kept
                    // from the original; confirm this quantization is intended.
                    int currentIntensity = (int)(Math.Round((double)(rgb.Item0 + rgb.Item1 + rgb.Item2) / 3.0 * (levels - 1)) / 255.0);
                    intensityBin[currentIntensity] += 1;
                    bins[currentIntensity].Item0 += rgb.Item0;
                    bins[currentIntensity].Item1 += rgb.Item1;
                    bins[currentIntensity].Item2 += rgb.Item2;
                    if (intensityBin[currentIntensity] > maxIntensity)
                    {
                        maxIntensity = intensityBin[currentIntensity];
                        maxIndex = currentIntensity;
                    }
                }
            }

            if (maxIntensity == 0)
            {
                maxIntensity = 1; // defensive; cannot occur once one sample is binned
            }

            // Cast before dividing: the original used integer division and
            // silently truncated the per-channel averages.
            double blue = (double)bins[maxIndex].Item0 / maxIntensity;
            double green = (double)bins[maxIndex].Item1 / maxIntensity;
            double red = (double)bins[maxIndex].Item2 / maxIntensity;
            result1.Set<cv.Vec3b>(offsetY, offsetX, new cv.Vec3b(ClipByte(blue), ClipByte(green), ClipByte(red)));
        }
    }
}
/// <summary>
/// Computes a 256-bin grayscale histogram of the Lenna sample image and
/// renders it as a bar chart in a second window.
/// </summary>
public void Run()
{
    Mat src = Cv2.ImRead(FilePath.Image.Lenna, ImreadModes.GrayScale);

    // Canvas for the histogram plot.
    const int Width = 260, Height = 200;
    Mat render = new Mat(new Size(Width, Height), MatType.CV_8UC3, Scalar.All(255));

    // One-dimensional histogram over channel 0: 256 bins spanning [0, 256).
    Mat hist = new Mat();
    int[] hdims = { 256 };
    Rangef[] ranges = { new Rangef(0, 256), };
    Cv2.CalcHist(new Mat[] { src }, new int[] { 0 }, null, hist, 1, hdims, ranges);

    // The tallest bin sets the vertical scale.
    double minVal, maxVal;
    Cv2.MinMaxLoc(hist, out minVal, out maxVal);

    // Scale counts to the canvas height and draw one filled bar per bin.
    Scalar color = Scalar.All(100);
    hist = hist * (maxVal != 0 ? Height / maxVal : 0.0);
    int binW = (int)((double)Width / hdims[0]);
    for (int bin = 0; bin < hdims[0]; ++bin)
    {
        Point bottomLeft = new Point(bin * binW, render.Rows);
        Point topRight = new Point((bin + 1) * binW, render.Rows - (int)(hist.Get<float>(bin)));
        render.Rectangle(bottomLeft, topRight, color, -1);
    }

    using (new Window("Image", WindowMode.AutoSize | WindowMode.FreeRatio, src))
    using (new Window("Histogram", WindowMode.AutoSize | WindowMode.FreeRatio, render))
    {
        Cv2.WaitKey();
    }
}
/// <summary>
/// Unity coroutine: captures the region of the screen covered by the RawImage,
/// converts it to a grayscale mat, extracts SIFT keypoints/descriptors, matches
/// them against the two pre-computed reference descriptor sets (des1/des2),
/// estimates a RANSAC homography from whichever reference matched better, and
/// applies the inverse of that homography's rotation to the overlay text
/// transforms (textRT1/textRT2). Runs one pass per invocation.
/// Reads fields: rawImageRT, sift, descriptorMatcher, des1, des2, kp1, kp2,
/// num1, num2, text1, text2, textRT1, textRT2. Writes: tex, mat, desCam,
/// srcPts, dstPts, num1, num2.
/// </summary>
IEnumerator CalculateHomography()
//void CalculateHomography()
{
    //Debug.Log("CalculateHomography1");
    //myCam.Pause();
    // Wait until the frame has finished rendering before taking a screenshot.
    yield return(new WaitForEndOfFrame());
    //yield return new WaitForSeconds((float)0.5);
    // NOTE: must wait at least ~0.3 s after startup for the camera image to
    // appear, otherwise SIFT blows up the memory right away.
    //input camera image
    /*Texture2D sourceTex = ScreenCapture.CaptureScreenshotAsTexture();
     * Color[] pix = sourceTex.GetPixels((int)rectBotLeft.x, (int)rectBotLeft.y, width, height);
     * Texture2D tex = new Texture2D(width, height);
     * tex.SetPixels(pix);
     * tex.Apply();*/
    //Debug.Log("CalculateHomography2");

    // The RawImage is centred on screen; compute its bottom-left corner in
    // screen pixels (GetPixels addresses from the bottom left).
    int xStart = (int)(Screen.width - rawImageRT.rect.width) / 2;
    int yStart = (int)(Screen.height - rawImageRT.rect.height) / 2;
    /*Debug.Log("xStart: "+xStart);
     * Debug.Log("yStart: "+yStart);
     * Debug.Log("Screen.width: "+Screen.width);
     * Debug.Log("Screen.height: "+Screen.height);
     * Debug.Log("rawImageRT.rect.width: "+rawImageRT.rect.width);
     * Debug.Log("rawImageRT.rect.height: "+rawImageRT.rect.height);*/

    // Grab the sign image (with text) from the screen.
    Texture2D sourceTex = ScreenCapture.CaptureScreenshotAsTexture();
    //rawImageRI.texture = sourceTex;
    //Color[] pix = sourceTex.GetPixels((int)rectBotLeft.x, (int)rectBotLeft.y, width, height);
    Color[] pix = sourceTex.GetPixels(xStart, yStart, (int)rawImageRT.rect.width, (int)rawImageRT.rect.height);
    tex = new Texture2D((int)rawImageRT.rect.width, (int)rawImageRT.rect.height);
    tex.SetPixels(pix);
    tex.Apply();
    //Debug.Log("tex.width: "+tex.width);
    //Debug.Log("tex.height: "+tex.height);
    //input fixed image
    /*Texture2D tex = new Texture2D(2,2);
     * string imgPath = "../signboard-rectangle/test-199-fast-628.jpg";
     * byte [] binaryImageData = File.ReadAllBytes(imgPath);
     * tex.LoadImage(binaryImageData);*/

    // Downscale to half size before feature extraction — required, prevents
    // the memory from exploding.
    TextureScale.Bilinear(tex, tex.width / 2, tex.height / 2);
    tex = TextureGray.ToGray(tex);
    //rawImageRI.texture = tex;
    mat = Unity.TextureToMat(tex);
    // Release the temporary textures now that the pixels live in `mat`.
    Destroy(sourceTex);
    Destroy(tex);
    //Cv2.ImShow("img", mat); ok
    //OpenCvSharp.Mat mat = Cv2.ImRead(imgPath, ImreadModes.Unchanged);
    //Debug.Log("mat: "+mat.ToString());
    //string imgPath = "../signboard-rectangle/test-199-fast-628.jpg";
    //OpenCvSharp.Mat mat = Cv2.ImRead(imgPath);

    // SIFT on the captured frame: detect keypoints, then compute descriptors
    // into desCam (an OutputArray backed by `mat`).
    InputArray imgCam = InputArray.Create(mat);
    desCam = OutputArray.Create(mat);
    //Cv2.ImShow("img", mat); ok
    //OpenCvSharp.Mat mat2 = mat;
    //sift = SIFT.Create();
    //System.Diagnostics.Stopwatch time = new System.Diagnostics.Stopwatch();
    //time.Start();
    // (slow/laggy step)
    OpenCvSharp.KeyPoint[] kpCam = sift.Detect(mat);
    //OpenCvSharp.KeyPoint[] kpCam = surf.Detect(mat);
    //OpenCvSharp.KeyPoint[] kpCam = orb.Detect(mat);
    //OpenCvSharp.KeyPoint[] kpCam = brief.Detect(mat);
    //time.Stop();
    //Debug.Log("ran " + time.Elapsed.TotalSeconds + " seconds");
    //myCam.Pause();
    //rawImageRI.texture = tex;
    //Cv2.ImShow("img", mat); ok
    //Cv2.ImShow("img", mat2); ok
    sift.Compute(imgCam, ref kpCam, desCam);
    //surf.Compute(img2, ref kpCam, desCam);
    //orb.Compute(img2, ref kpCam, desCam);
    //brief.Compute(img2, ref kpCam, desCam);
    //Cv2.ImShow("img", mat);
    //Cv2.ImShow("img", mat2); crashes
    OpenCvSharp.Mat desCammat = desCam.GetMat();
    //Debug.Log("desCammat: "+desCammat);

    // Match the camera descriptors against both reference descriptor sets.
    //if (!M) if homography M has not been computed yet {
    //desFirstCatch = desCam;
    //OutputArray descriptors_object = des1;
    OpenCvSharp.Mat des1mat = des1.GetMat();
    OpenCvSharp.Mat des2mat = des2.GetMat();
    //OpenCvSharp.Mat des3mat = des3.GetMat();
    //Debug.Log("des1mat: "+des1mat);
    OpenCvSharp.DMatch[] dmatch1 = descriptorMatcher.Match(des1mat, desCammat);
    OpenCvSharp.DMatch[] dmatch2 = descriptorMatcher.Match(des2mat, desCammat);
    //OpenCvSharp.DMatch[] dmatch3 = descriptorMatcher.Match(des3mat, desCammat);
    //Debug.Log("damtch1[0]: "+dmatch1[0].ToString());
    //}
    //else {
    //OpenCvSharp.Mat desFirstCatchmat = desFirstCatch.GetMat();
    //OpenCvSharp.DMatch[] dmatch = descriptorMatcher.Match(desFirstCatchmat, desCammat);
    //OutputArray descriptors_object = desFirstCatch;
    //}

    double max_dist1 = 0;
    double min_dist1 = 100;
    double max_dist2 = 0;
    double min_dist2 = 100;
    //double max_dist3 = 0;
    //double min_dist3 = 100;
    //Cv2.ImShow("img", mat); crashes

    // Quick calculation of max and min distances between keypoints.
    foreach (OpenCvSharp.DMatch d in dmatch1)
    {
        double dist = d.Distance;
        if (dist < min_dist1) { min_dist1 = dist; }
        if (dist > max_dist1) { max_dist1 = dist; }
    }
    foreach (OpenCvSharp.DMatch d in dmatch2)
    {
        double dist = d.Distance;
        if (dist < min_dist2) { min_dist2 = dist; }
        if (dist > max_dist2) { max_dist2 = dist; }
    }
    /*foreach (OpenCvSharp.DMatch d in dmatch3){
     * double dist = d.Distance;
     * if( dist < min_dist3 ) min_dist3 = dist;
     * if( dist > max_dist3 ) max_dist3 = dist;
     * }*/

    // Keep only "good" matches (i.e. whose distance is less than 3*min_dist).
    List<OpenCvSharp.DMatch> goodMatch1 = new List<OpenCvSharp.DMatch>();
    foreach (OpenCvSharp.DMatch d in dmatch1)
    {
        if (d.Distance < 3 * min_dist1) { goodMatch1.Add(d); }
    }
    List<OpenCvSharp.DMatch> goodMatch2 = new List<OpenCvSharp.DMatch>();
    foreach (OpenCvSharp.DMatch d in dmatch2)
    {
        if (d.Distance < 3 * min_dist2) { goodMatch2.Add(d); }
    }
    /*List<OpenCvSharp.DMatch> goodMatch3 = new List<OpenCvSharp.DMatch>();
     * foreach (OpenCvSharp.DMatch d in dmatch3){
     * if( d.Distance < 3*min_dist3 )
     * goodMatch3.Add(d);
     * }*/

    // Collect matched point pairs: reference keypoints -> camera keypoints.
    List<OpenCvSharp.Point2f> srcPts1 = new List<OpenCvSharp.Point2f>();
    List<OpenCvSharp.Point2f> dstPts1 = new List<OpenCvSharp.Point2f>();
    foreach (OpenCvSharp.DMatch d in goodMatch1)
    {
        //-- Get the keypoints from the good matches
        srcPts1.Add(kp1[d.QueryIdx].Pt);
        dstPts1.Add(kpCam[d.TrainIdx].Pt);
        //Debug.Log("kp1[d.QueryIdx].Pt: "+kp1[d.QueryIdx].Pt);
    }
    List<OpenCvSharp.Point2f> srcPts2 = new List<OpenCvSharp.Point2f>();
    List<OpenCvSharp.Point2f> dstPts2 = new List<OpenCvSharp.Point2f>();
    foreach (OpenCvSharp.DMatch d in goodMatch2)
    {
        //-- Get the keypoints from the good matches
        srcPts2.Add(kp2[d.QueryIdx].Pt);
        dstPts2.Add(kpCam[d.TrainIdx].Pt);
        //Debug.Log("kp1[d.QueryIdx].Pt: "+kp1[d.QueryIdx].Pt);
    }
    /*List<OpenCvSharp.Point2f> srcPts3 = new List<OpenCvSharp.Point2f>();
     * List<OpenCvSharp.Point2f> dstPts3 = new List<OpenCvSharp.Point2f>();
     * foreach (OpenCvSharp.DMatch d in goodMatch3){
     * //-- Get the keypoints from the good matches
     * srcPts3.Add(kp3[d.QueryIdx].Pt);
     * dstPts3.Add(kpCam[d.TrainIdx].Pt);
     * //Debug.Log("kp1[d.QueryIdx].Pt: "+kp1[d.QueryIdx].Pt);
     * }*/

    // Bail out (skip this iteration) if too few keypoints matched either reference.
    if (srcPts1.Count < 200 && srcPts2.Count < 200)
    {
        yield break;
    }

    // Pick whichever reference matched better; num1/num2 count the wins and
    // drive which overlay text is shown.
    if (srcPts1.Count >= srcPts2.Count)
    {
        srcPts = new List<OpenCvSharp.Point2f>(srcPts1);
        dstPts = new List<OpenCvSharp.Point2f>(dstPts1);
        text1.enabled = true;
        text2.enabled = false;
        num1++;
        //text3.enabled = false;
    }
    /*else if(srcPts2.Count >= srcPts1.Count && srcPts2.Count >= srcPts3.Count){
     * srcPts = new List<OpenCvSharp.Point2f>(srcPts2);
     * dstPts = new List<OpenCvSharp.Point2f>(dstPts2);
     * text2.enabled = true;
     * text1.enabled = false;
     * text3.enabled = false;
     * }*/
    else
    {
        srcPts = new List<OpenCvSharp.Point2f>(srcPts2);
        dstPts = new List<OpenCvSharp.Point2f>(dstPts2);
        text2.enabled = true;
        text1.enabled = false;
        num2++;
        //text2.enabled = false;
    }
    // Hysteresis: a clear lead (10+) locks in that text; reset both counters
    // once either exceeds 60 so the decision can flip again later.
    if (num1 > num2 + 10)
    {
        text1.enabled = true;
        text2.enabled = false;
    }
    if (num2 > num1 + 10)
    {
        text2.enabled = true;
        text1.enabled = false;
    }
    if (num1 > 60 || num2 > 60)
    {
        num1 = 0;
        num2 = 0;
    }
    //OpenCvSharp.Mat mat2 = mat;
    //Cv2.DrawKeypoints(mat, kpCam, mat2);
    //Cv2.ImShow("img", mat); garbled image
    //Texture2D tex2 = new Texture2D(8, 8);
    //tex2 = Unity.MatToTexture(mat);
    //rawImageRI.texture = tex2;
    //myCam.Pause();
    //Cv2.ImShow("img", mat2); garbled image

    // Estimate the homography M (reference -> camera) with RANSAC,
    // reprojection threshold 5 px. `outputImg` only backs the mask OutputArray.
    Texture2D emptyTex = new Texture2D(8, 8);
    OpenCvSharp.Mat outputImg = Unity.TextureToMat(emptyTex);
    //Debug.Log("outputImg: "+outputImg.ToString());
    InputArray srcArr = InputArray.Create<OpenCvSharp.Point2f>(srcPts);
    InputArray dstArr = InputArray.Create<OpenCvSharp.Point2f>(dstPts);
    OutputArray mask = OutputArray.Create(outputImg);
    OpenCvSharp.Mat M = Cv2.FindHomography(srcArr, dstArr, HomographyMethods.Ransac, 5, mask);

    // Copy M into an OpenCVForUnity mat (row-major 3x3).
    OpenCVForUnity.Mat transMat = new OpenCVForUnity.Mat(3, 3, CvType.CV_32FC1);
    transMat.put(0, 0, M.Get<double>(0, 0), M.Get<double>(0, 1), M.Get<double>(0, 2), M.Get<double>(1, 0), M.Get<double>(1, 1), M.Get<double>(1, 2), M.Get<double>(2, 0), M.Get<double>(2, 1), M.Get<double>(2, 2));
    //Debug.Log("transMat: "+transMat.dump());
    //Debug.Log("mask: "+mask);
    //OpenCvSharp.Mat maskMat = mask.GetMat();
    //Debug.Log("maskMat: "+maskMat.ToString());
    //maskMoB = new OpenCvSharp.MatOfByte(maskMat);
    //-- Get the corners from the image_1 ( the object to be "detected" )
    /*OpenCvSharp.Point2f[] obj_corners = new OpenCvSharp.Point2f[4];
     * obj_corners[0] = new OpenCvSharp.Point2f(0, 0);
     * obj_corners[1] = new OpenCvSharp.Point2f(inputTex.width, 0);
     * obj_corners[2] = new OpenCvSharp.Point2f(inputTex.width, inputTex.height);
     * obj_corners[3] = new OpenCvSharp.Point2f(0, inputTex.height);
     *
     * //OpenCvSharp.Point2f[] scene_corners = new OpenCvSharp.Point2f[4];
     * //scene_corners = Cv2.PerspectiveTransform(obj_corners, M);
     *
     * //if (!M) if homography M not yet computed {
     * //Cv2.DrawMatches(inputImg, kp1, mat, kpCam, goodMatch, outputImg, OpenCvSharp.Scalar.All(-1),
     * //OpenCvSharp.Scalar.All(-1), maskMoB.ToArray(), DrawMatchesFlags.NotDrawSinglePoints);
     * //else {
     *
     * //Texture2D outputTex = Unity.MatToTexture(outputImg);
     * //rawImageRI.texture = outputTex;
     *
     * //-- Draw lines between the corners (the mapped object in the scene - image_2 )
     * //Cv2.Line(outputImg, scene_corners[0] + obj_corners[1], scene_corners[1] + obj_corners[1], OpenCvSharp.Scalar.LightBlue, 4);
     * //Cv2.Line(outputImg, scene_corners[1] + obj_corners[1], scene_corners[2] + obj_corners[1], OpenCvSharp.Scalar.LightBlue, 4);
     * //Cv2.Line(outputImg, scene_corners[2] + obj_corners[1], scene_corners[3] + obj_corners[1], OpenCvSharp.Scalar.LightBlue, 4);
     * //Cv2.Line(outputImg, scene_corners[3] + obj_corners[1], scene_corners[0] + obj_corners[1], OpenCvSharp.Scalar.LightBlue, 4);
     *
     * //OpenCvSharp.Mat outimg = Unity.TextureToMat(emptyTex);
     * //inputImg = Unity.TextureToMat(emptyTex);
     * //Cv2.DrawKeypoints(mat, kpCam, outimg, OpenCvSharp.Scalar.LightBlue);
     *
     * //show image with text after homography
     * /*string imgPath2 = "../signboard-rectangle/test-IMG_0204-text.PNG";
     * textTex = new Texture2D(2,2);
     * byte [] binaryImageData2 = File.ReadAllBytes(imgPath2);
     * textTex.LoadImage(binaryImageData2);
     * rawImageRI.texture = textTex;*/
    /*OpenCVForUnity.Mat inputTextImg = new OpenCVForUnity.Mat(new OpenCVForUnity.Size(textTex.width, textTex.height), CvType.CV_8UC4);
     * Utils.texture2DToMat(textTex, inputTextImg);
     * OpenCVForUnity.Mat outputTextImg = new OpenCVForUnity.Mat(new OpenCVForUnity.Size(textTex.width, textTex.height), CvType.CV_8UC4);
     *
     * Imgproc.warpPerspective(inputTextImg, outputTextImg, transMat, new OpenCVForUnity.Size(textTex.width, textTex.height));
     *
     * Texture2D outputTex = new Texture2D((int)textTex.width, (int)textTex.height, TextureFormat.RGB24, false);
     * Utils.matToTexture2D(outputTextImg, outputTex);*/
    //TextureScale.Bilinear(outputTex, outputTex.width/5, outputTex.height/5);
    //rawImageRI.texture = outputTex;
    //text.enabled = true;
    /*Vector3 scale;
     * scale.x = new Vector4((float)M.Get<double>(0,0), (float)M.Get<double>(1,0), (float)M.Get<double>(2,0), 0).magnitude;
     * scale.y = new Vector4((float)M.Get<double>(0,1), (float)M.Get<double>(1,1), (float)M.Get<double>(2,1), 0).magnitude;
     * scale.z = new Vector4((float)M.Get<double>(0,2), (float)M.Get<double>(1,2), (float)M.Get<double>(2,2), 0).magnitude;
     *
     * Vector3 forward;
     * forward.x = (float)M.Get<double>(0,2);
     * forward.y = (float)M.Get<double>(1,2);
     * forward.z = (float)M.Get<double>(2,2);
     *
     * Vector3 upwards;
     * upwards.x = (float)M.Get<double>(0,1);
     * upwards.y = (float)M.Get<double>(1,1);
     * upwards.z = (float)M.Get<double>(2,1);
     *
     * //textRT.localScale = scale;
     * //textRT.rotation = Quaternion.LookRotation(forward, upwards);*/

    // Embed the 2-D homography into a 4x4 transform, invert it, and apply the
    // resulting rotation to the overlay texts.
    Matrix4x4 matrix = new Matrix4x4();
    /*matrix.SetRow(0, new Vector4((float)M.Get<double>(0,0), (float)M.Get<double>(0,1), (float)M.Get<double>(0,2),0));
     * matrix.SetRow(1, new Vector4((float)M.Get<double>(1,0), (float)M.Get<double>(1,1), (float)M.Get<double>(1,2),0));
     * matrix.SetRow(2, new Vector4(0,0,1,0));
     * matrix.SetRow(3, new Vector4(0,0,0,1));*/
    // Using the inverse works reasonably well.
    matrix.SetRow(0, new Vector4((float)M.Get<double>(0, 0), (float)M.Get<double>(0, 1), 0, (float)M.Get<double>(0, 2)));
    matrix.SetRow(1, new Vector4((float)M.Get<double>(1, 0), (float)M.Get<double>(1, 1), 0, (float)M.Get<double>(1, 2)));
    matrix.SetRow(2, new Vector4(0, 0, 1, 0));
    matrix.SetRow(3, new Vector4(0, 0, 0, 1));
    Matrix4x4 inverse = matrix.inverse;
    //textRT.localScale = matrix.lossyScale;
    //textRT.rotation = matrix.rotation;
    // .rotation and .eulerAngles give the same result here.
    textRT1.rotation = inverse.rotation;
    textRT2.rotation = inverse.rotation;
    //textRT3.rotation = inverse.rotation;
    Destroy(emptyTex);
    //calculate euler angle
    /*double angleX = Math.Asin(-M.Get<double>(2,1));
     * double angleY = Math.Atan2(M.Get<double>(2,0), M.Get<double>(2,2));
     * double angleZ = Math.Atan2(M.Get<double>(0,1), M.Get<double>(1,1));
     * //textRT.eulerAngles = new Vector3((float)angleX, (float)angleY, (float)angleZ);
     * //Debug.Log("textRT.eulerAngles: "+textRT.eulerAngles.ToString());
     *
     * //calculate quaternion
     * double w = Math.Sqrt(1 + M.Get<double>(0,0) + M.Get<double>(1,1) + M.Get<double>(2,2))/2;
     * double w4 = w*4;
     * double qx = (M.Get<double>(2,1) - M.Get<double>(1,2))/w4 ;
     * double qy = (M.Get<double>(0,2) - M.Get<double>(2,0))/w4 ;
     * double qz = (M.Get<double>(1,0) - M.Get<double>(0,1))/w4 ;
     * //textRT.rotation = new Quaternion((float)qx, (float)qy, (float)qz, 1);
     *
     * double tr = M.Get<double>(0,0) + M.Get<double>(1,1) + M.Get<double>(2,2);
     * Debug.Log("tr: "+tr);*/
    //Cv2.ImShow("img", mat);
    //myCam.Pause();
}
/// <summary>
/// Classical Multidimensional Scaling: recovers 2-D city positions from the
/// pairwise distance matrix and plots them in a window.
/// </summary>
public void Run()
{
    // Distance matrix with Torgerson's additive constant applied.
    int size = CityDistance.GetLength(0);
    Mat t = new Mat(size, size, MatType.CV_64FC1, CityDistance);
    double torgarson = Torgerson(t);
    t += torgarson;

    // Element-wise square of the (adjusted) distances.
    t = t.Mul(t);

    // Double-centering: B = -1/2 * G * D^2 * G^T.
    Mat g = CenteringMatrix(size);
    Mat b = g * t * g.T() * -0.5;

    // Eigen-decomposition of B; negative eigenvalues are clamped to zero.
    Mat values = new Mat();
    Mat vectors = new Mat();
    Cv2.Eigen(b, values, vectors);
    for (int r = 0; r < values.Rows; r++)
    {
        if (values.Get<double>(r) < 0)
        {
            values.Set<double>(r, 0);
        }
    }

    // Keep the two leading eigenvectors, each scaled by sqrt(eigenvalue).
    Mat result = vectors.RowRange(0, 2);
    {
        var indexer = result.GetGenericIndexer<double>();
        for (int r = 0; r < result.Rows; r++)
        {
            for (int c = 0; c < result.Cols; c++)
            {
                indexer[r, c] *= Math.Sqrt(values.Get<double>(r));
            }
        }
    }

    // Normalize the coordinates into [0, 800] for display.
    Cv2.Normalize(result, result, 0, 800, NormTypes.MinMax);

    // Draw each city as a red dot labelled with its name.
    using (Mat img = Mat.Zeros(600, 800, MatType.CV_8UC3))
    using (Window window = new Window("City Location Estimation"))
    {
        var coords = result.GetGenericIndexer<double>();
        for (int c = 0; c < size; c++)
        {
            double x = coords[0, c] * 0.7 + img.Width * 0.1;
            double y = coords[1, c] * 0.7 + img.Height * 0.1;
            img.Circle((int)x, (int)y, 5, Scalar.Red, -1);
            Point textPos = new Point(x + 5, y + 10);
            img.PutText(CityNames[c], textPos, HersheyFonts.HersheySimplex, 0.5, Scalar.White);
        }
        window.Image = img;
        Cv2.WaitKey();
    }
}