// Per-frame camera callback: routes the incoming frame through the processing
// path selected by mViewMode and returns the RGBA Mat to be displayed.
public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
{
    switch (mViewMode)
    {
        case VIEW_MODE_GRAY:
            // Gray source expanded to 4-channel RGBA for display.
            Imgproc.CvtColor(inputFrame.Gray(), mRgba, Imgproc.ColorGray2rgba, 4);
            break;

        case VIEW_MODE_RGBA:
            // RGBA frame passed through untouched.
            mRgba = inputFrame.Rgba();
            break;

        case VIEW_MODE_CANNY:
            // Canny edges computed on the gray frame, then expanded to RGBA.
            mRgba = inputFrame.Rgba();
            Imgproc.Canny(inputFrame.Gray(), mIntermediateMat, 80, 100);
            Imgproc.CvtColor(mIntermediateMat, mRgba, Imgproc.ColorGray2rgba, 4);
            break;

        case VIEW_MODE_FEATURES:
            // Native feature detector draws directly into mRgba via JNI.
            mRgba = inputFrame.Rgba();
            mGray = inputFrame.Gray();
            FindFeatures(JNIEnv.Handle, JNIEnv.FindClass(typeof(Java.Lang.Object)), mGray.NativeObjAddr, mRgba.NativeObjAddr);
            break;
    }
    return mRgba;
}
// Update is called once per frame.
// Lane-line demo: render the camera to a texture, detect edges with Canny,
// extract line segments with a probabilistic Hough transform, and draw the
// segments whose angle falls in the left (0..80 deg) or right (100..180 deg)
// band in red onto the "test3" object's texture.
void Update()
{
    tex2 = MakeTexture2D(carm.targetTexture);
    Mat imgMat = new Mat(tex2.height, tex2.width, CvType.CV_8UC3);
    Utils.texture2DToMat(tex2, imgMat);

    // Grayscale conversion.
    Mat grayMat = new Mat();
    Imgproc.cvtColor(imgMat, grayMat, Imgproc.COLOR_RGB2GRAY);

    // Canny edge detection.
    Mat cannyMat = new Mat();
    Imgproc.Canny(grayMat, cannyMat, 70, 210);

    // ROI setup.
    // NOTE(review): the ROI is adjusted on grayMat AFTER it was consumed by
    // Canny, so it has no effect on the edge image used below — confirm intent.
    grayMat.adjustROI(cannyMat.height(), 0, cannyMat.width() * 4 / 10, cannyMat.width());

    // Probabilistic Hough line detection (1 px / 1 degree resolution).
    Mat lines = new Mat();
    Imgproc.HoughLinesP(cannyMat, lines, 1, Mathf.PI / 180, 30, 100, 20);

    // Each detected segment is 4 ints: x1, y1, x2, y2.
    int[] linesArray = new int[lines.cols() * lines.rows() * lines.channels()];
    lines.get(0, 0, linesArray);
    for (int i = 0; i < linesArray.Length; i = i + 4)
    {
        // Fix: the angle was recomputed up to five times per segment; compute
        // it once. (Arguments are Atan2(dx, dy) — kept as-is from the original
        // so the accepted angle bands are unchanged.)
        float angle = Mathf.Atan2(linesArray[i + 2] - linesArray[i + 0], linesArray[i + 3] - linesArray[i + 1]) * Mathf.Rad2Deg;
        //Debug.Log(angle);
        bool leftBand = 0 <= angle && angle < 80;
        bool rightBand = 100 < angle && angle <= 180;
        if (leftBand || rightBand)
        {
            Imgproc.line(imgMat, new Point(linesArray[i + 0], linesArray[i + 1]), new Point(linesArray[i + 2], linesArray[i + 3]), new Scalar(255, 0, 0), 4);
        }
    }

    // Publish the annotated image.
    Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(imgMat, texture);
    GameObject.Find("test3").GetComponent <Renderer>().material.mainTexture = texture;
}
// Camera callback: returns the Canny edge image of the incoming frame.
public Mat OnCameraFrame(Mat p0)
{
    Mat edges = new Mat();
    // Fix: thresholds were passed as (255, 64) — high before low. OpenCV's
    // Canny swaps them internally when threshold1 > threshold2, so stating the
    // hysteresis range as (low = 64, high = 255) is behaviorally identical but
    // no longer misleading.
    Imgproc.Canny(p0, edges, 64, 255);
    return edges;
}
// Scales `src` to 256 px height (aspect preserved), blurs and grays it, and
// returns a bitmap of its edges using Canny or Sobel per `filter_type`.
public static Bitmap getEdgeDetectedImage(this Bitmap src, Filter_Type filter_type)
{
    // Resize to height 256, preserving aspect ratio.
    Bitmap resizedBitmap = Bitmap.CreateScaledBitmap(src, (src.Width * 256) / src.Height, 256, true);
    OpenCV.Core.Mat resizedMat = new OpenCV.Core.Mat();
    OpenCV.Android.Utils.BitmapToMat(resizedBitmap, resizedMat);

    // Light 3x3 blur to suppress noise before edge detection.
    OpenCV.Core.Mat gaussianMat = new OpenCV.Core.Mat();
    Imgproc.GaussianBlur(resizedMat, gaussianMat, new OpenCV.Core.Size(3, 3), 0, 0);

    // Fix: RGBA->GRAY produces a single-channel image, but dstCn was 2, which
    // is an invalid destination channel count for this conversion. Use 1.
    OpenCV.Core.Mat grayMat = new OpenCV.Core.Mat();
    Imgproc.CvtColor(gaussianMat, grayMat, Imgproc.ColorRgba2gray, 1);

    OpenCV.Core.Mat edgeDetectedMat = new OpenCV.Core.Mat();
    if (filter_type == Filter_Type.CANNY)
    {
        Imgproc.Canny(grayMat, edgeDetectedMat, 100, 100);
    }
    else
    {
        // Mixed derivative (dx = 1, dy = 1), then scaled/absolute into 8 bits.
        OpenCV.Core.Mat sobelMat = new OpenCV.Core.Mat();
        Imgproc.Sobel(grayMat, sobelMat, CvType.Cv8u, 1, 1);
        Core.ConvertScaleAbs(sobelMat, edgeDetectedMat, 6, 1);
    }

    Bitmap resultBitmap = Bitmap.CreateBitmap(resizedBitmap.Width, resizedBitmap.Height, Bitmap.Config.Argb8888);
    OpenCV.Android.Utils.MatToBitmap(edgeDetectedMat, resultBitmap);
    return resultBitmap;
}
// Counts contours (dice pips) inside `rect` of `src`, drawing each detected
// contour into `temp` for visualization. Returns the contour count.
public int matchDice(Mat src, OpenCVForUnity.Rect rect, Mat temp)
{
    Mat roi = new Mat(src, rect);

    // Grayscale conversion of the region of interest.
    Mat gray = new Mat();
    Imgproc.cvtColor(roi, gray, Imgproc.COLOR_RGB2GRAY);

    Mat hierarchy = new Mat();
    List<MatOfPoint> outlines = new List<MatOfPoint>();

    // Blur, Canny, then erode/dilate cleanup via the shared helper.
    Imgproc.blur(gray, gray, new Size(3, 3));
    Imgproc.Canny(gray, gray, 50, 150);
    morphOps(gray);

    // Contour extraction.
    Imgproc.findContours(gray, outlines, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
    for (int idx = 0; idx < outlines.Count; idx++)
    {
        Imgproc.drawContours(temp, outlines, idx, new Scalar(255, 255, 255), 2);
    }

    // Report the number of contours found.
    return outlines.Count;
}
// Update is called once per frame.
// "Comic/sketch" filter: posterizes the gray camera image into three tone
// bands, composites the bands over a background Mat, then overlays inverted
// Canny edge lines on top, and pushes the result to `texture`.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        // Utils.webCamTextureToMat (webCamTexture, grayMat, colors);

        // Start from the background image each frame.
        bgMat.copyTo(dstMat);

        // Smooth a copy for edge detection so posterization below does not
        // feed hard steps into Canny.
        Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);

        grayMat.get(0, 0, grayPixels);

        // Posterize: <70 -> black, 70..119 -> mid gray (mask = 0, so the
        // background shows through), >=120 -> white. maskPixels marks which
        // pixels overwrite the background in the masked copy below.
        for (int i = 0; i < grayPixels.Length; i++) {
            maskPixels [i] = 0;
            if (grayPixels [i] < 70) {
                grayPixels [i] = 0;
                maskPixels [i] = 1;
            } else if (70 <= grayPixels [i] && grayPixels [i] < 120) {
                grayPixels [i] = 100;
            } else {
                grayPixels [i] = 255;
                maskPixels [i] = 1;
            }
        }

        grayMat.put(0, 0, grayPixels);
        maskMat.put(0, 0, maskPixels);
        grayMat.copyTo(dstMat, maskMat);

        // Edge lines: detect, use the edge map as the mask, then invert the
        // lines so they render dark over the composited result.
        Imgproc.Canny(lineMat, lineMat, 20, 120);
        lineMat.copyTo(maskMat);
        Core.bitwise_not(lineMat, lineMat);
        lineMat.copyTo(dstMat, maskMat);

        // Imgproc.putText (dstMat, "W:" + dstMat.width () + " H:" + dstMat.height () + " SO:" + Screen.orientation, new Point (5, dstMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (0), 2, Imgproc.LINE_AA, false);
        // Imgproc.cvtColor(dstMat,rgbaMat,Imgproc.COLOR_GRAY2RGBA);
        // Utils.matToTexture2D (rgbaMat, texture);
        Utils.matToTexture2D(dstMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
// Edge-detects `src` IN PLACE: grayscale (via the Gray helper), 3x3 Gaussian
// blur, Canny with the given hysteresis thresholds, then back to RGBA so the
// result can be displayed directly. Returns the mutated `src`.
internal static Mat Edge(Mat src, int thresh1, int thresh2)
{
    Gray(src);
    Imgproc.GaussianBlur(src, src, new Size(3, 3), 0);
    Imgproc.Canny(src, src, thresh1, thresh2);

    // Expand the single-channel edge map back to RGBA.
    Imgproc.cvtColor(src, src, Imgproc.COLOR_GRAY2RGBA);
    return src;
}
// Loads `texture` into `mat` (resized to 400x400, converted to BGRA) and
// writes its Canny edge image into `edge`.
void ProcessImg(Texture2D texture, Mat mat, Mat edge)
{
    Utils.texture2DToMat(texture, mat);

    // Normalize working size and channel layout before edge detection.
    Size workingSize = new Size(400, 400);
    Imgproc.resize(mat, mat, workingSize);
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2BGRA);

    // Hysteresis thresholds: low = 10, high = 125.
    Imgproc.Canny(mat, edge, 10, 125);

    Debug.Log("Processed!");
}
// Refines the depth image of `f` with an edge-guided domain-transform filter:
// Sobel gradients of the RGB image drive a Canny edge map, which guides
// Ximgproc.dtFilter over the (cropped/padded) 8-bit depth image. The result is
// written to f.refinedDepth and then remapped through a LUT.
void domainTransferDepthImage(Frame3DGPU f)
{
    //Utils.setDebugMode(true);
    Debug.Log("Applying EdgeCleanup to Depth");

    // convert from texture to mat (flip both axes to match depth orientation)
    Mat rgbMat = new Mat();
    Core.flip(Util.toMat(f.postprocessedRGBImage, CvType.CV_8UC3), rgbMat, -1);
    Mat depthMat = Util.toMat(f.depthImage, CvType.CV_16UC1);

    Mat gray = new Mat();
    Imgproc.cvtColor(rgbMat, gray, Imgproc.COLOR_RGBA2GRAY);

    // Horizontal/vertical 16-bit derivatives feed the gradient Canny overload.
    Mat sobelX = new Mat();
    Mat sobelY = new Mat();
    Imgproc.Sobel(gray, sobelX, CvType.CV_16S, 1, 0, (int)ksize, sobelScale, 0, Core.BORDER_DEFAULT);
    Imgproc.Sobel(gray, sobelY, CvType.CV_16S, 0, 1, (int)ksize, sobelScale, 0, Core.BORDER_DEFAULT);

    // Compress 16-bit depth into 8 bits (scale 0.03) and invert it.
    Mat depthMat8bit = new Mat();
    depthMat.convertTo(depthMat8bit, CvType.CV_8UC1, 0.03f);
    Core.bitwise_not(depthMat8bit, depthMat8bit);
    //Imgproc.equalizeHist(depthMat8bit, depthMat8bit);
    Mat depthFlipped = new Mat();
    Core.flip(depthMat8bit, depthFlipped, -1);

    // Canny overload taking precomputed x/y gradients (L2gradient = true).
    Mat canneyRslt = new Mat();
    Imgproc.Canny(sobelX, sobelY, canneyRslt, cannyThreshold1, cannyThreshold2, true);
    //Imgcodecs.imwrite("C:/Users/SIGLab/AppData/LocalLow/Intel/Photo3D/3dImages/" + "depth.png", canneyRslt);

    //415 incomplete depth
    // Crop away the sensor's invalid border (beyond 1190x690) and pad back to
    // 1280x720 by replicating the edge pixels.
    Mat cropped = depthFlipped.submat(0, 690, 0, 1190);
    Core.copyMakeBorder(cropped, depthFlipped, 0, 720 - 690, 0, 1280 - 1190, Core.BORDER_REPLICATE | Core.BORDER_ISOLATED);

    // Laplacian is computed but not used below — presumably leftover debugging.
    Mat laplacianRslt = new Mat();
    Imgproc.Laplacian(gray, laplacianRslt, CvType.CV_32F, 5, .1, 0);

    // Edge-guided domain-transform smoothing of the depth image.
    Ximgproc.dtFilter(canneyRslt, depthFlipped, f.refinedDepth, sigmaSpacial, sigmaColor, Ximgproc.DTF_NC, dtIter);

    // Not working with built solutions, cant figure out why
    // Remap the refined depth through channel 0 of the depthRescale LUT image.
    List <Mat> matList = new List <Mat>();
    Mat depthLUT = Util.toMat(depthRescale, CvType.CV_8UC3);
    Core.split(depthLUT, matList);
    Mat temp = new Mat();
    f.refinedDepth.convertTo(temp, CvType.CV_8UC1);
    Core.LUT(temp, matList[0], f.refinedDepth);
    //Utils.setDebugMode(false);
}
// Thresholds and edge-detects `srcTexture`, draws every contour with area > 80
// (in green) onto the source image, and shows the result in `destImage` (or
// `srcImage` when destImage is null). Also updates infoText with the count.
void ConvertIntoDest(Texture2D srcTexture, RawImage destImage = null)
{
    srcMat = new Mat(srcTexture.height, srcTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(srcTexture, srcMat);
    destMat = new Mat();
    srcMat.copyTo(destMat);

    // Gray -> box blur -> binary threshold -> Canny edge map.
    Imgproc.cvtColor(destMat, destMat, Imgproc.COLOR_BGR2GRAY);
    // Imgproc.GaussianBlur(destMat,destMat,new Size(5,5) , 1);
    Imgproc.blur(destMat, destMat, new Size(low, low));
    Imgproc.threshold(destMat, destMat, 120, 255, Imgproc.THRESH_BINARY);
    Imgproc.Canny(destMat, destMat, 20, 190);

    List<MatOfPoint> contours = new List<MatOfPoint>();
    Imgproc.findContours(destMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Keep only contours above the minimum area.
    List<MatOfPoint> contours_list = new List<MatOfPoint>();
    for (int i = 0; i < contours.Count; i++)
    {
        double area = Imgproc.contourArea(contours[i]);
        print(area);
        // if(area > 1000 && area < 3000){ ... }
        if (area > 80)
        {
            contours_list.Add(contours[i]);
        }
    }

    // Fix: the original called drawContours with index -1 inside a loop,
    // redrawing the whole list once per contour (accidental O(n^2)). Index -1
    // already draws every contour, so a single call yields the identical image.
    if (contours_list.Count > 0)
    {
        Imgproc.drawContours(srcMat, contours_list, -1, new Scalar(0, 255, 0), 4);
    }

    print("Number of valid contours detected : " + contours_list.Count.ToString());
    infoText.text = "Detection : " + contours_list.Count.ToString();

    Texture2D finalTexture = new Texture2D(srcMat.width(), srcMat.height(), TextureFormat.RGB24, false);
    Utils.matToTexture2D(srcMat, finalTexture);
    if (destImage == null)
    {
        srcImage.texture = finalTexture;
    }
    else
    {
        destImage.texture = finalTexture;
        // SaveTextureAsPNG(finalTexture,"CropImageOutput");
        destImage.enabled = true;
    }
}
// Renders the Canny edges of `src` into `dst` as a 3-channel BGR image.
void DrawEdge(Mat src, Mat dst)
{
    Mat gray = new Mat();
    Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);

    Mat edges = new Mat();
    Imgproc.Canny(gray, edges, 100, 200);

    // Expand the single-channel edge map back to BGR to match src's layout.
    Imgproc.cvtColor(edges, dst, Imgproc.COLOR_GRAY2BGR);
}
// Loads lena.jpg into m_srcImage, then builds a composite in m_dstImage:
// a Canny edge mask is computed (currently unused — the masked copy is
// commented out) and a 125x200 patch of the source is copied into the
// destination image.
void Start()
{
    srcMat = Imgcodecs.imread(Application.dataPath + "/Textures/lena.jpg", 1); //512,512
    Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGB);

    // Show the source image, anchored bottom-left.
    Texture2D t2d = new Texture2D(srcMat.width(), srcMat.height());
    Utils.matToTexture2D(srcMat, t2d);
    Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    m_srcImage.sprite = sp;
    m_srcImage.preserveAspect = true;
    m_srcImage.rectTransform.offsetMin = new Vector2(0, 0);
    m_srcImage.rectTransform.offsetMax = new Vector2(t2d.width, t2d.height);
    m_srcImage.rectTransform.anchoredPosition = Vector2.zero;

    //--------------------------------------------------//

    dstMat = Imgcodecs.imread(Application.dataPath + "/Textures/0.jpg", 1); //500,500
    Imgproc.cvtColor(dstMat, dstMat, Imgproc.COLOR_BGR2RGB);
    //dstMat = new Mat();

    Mat grayMat = new Mat();
    detected_edges = new Mat();
    double threshold1 = 1;
    double threshold2 = 100;
    int kernel_size = 3;

    // Denoise with a 3x3 kernel before Canny.
    Imgproc.cvtColor(srcMat, grayMat, Imgproc.COLOR_RGB2GRAY);
    Imgproc.blur(grayMat, detected_edges, new Size(3, 3));
    Imgproc.Canny(detected_edges, detected_edges, threshold1, threshold2, kernel_size, false);

    // Use the Canny edge output as a mask to show the original image.
    //dstMat.setTo(new Scalar(0));
    Imgproc.resize(dstMat, dstMat, srcMat.size());
    //srcMat.copyTo(dstMat, detected_edges); // requires srcMat and dstMat to be the same size
    // left.copyTo(middle, right): left is the pixel source, right the mask, composited into middle.
    OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect(25, 25, 125, 200);
    srcMat.submat(rect).copyTo(dstMat);

    // Show the composited destination image.
    Texture2D dst_t2d = new Texture2D(dstMat.width(), dstMat.height());
    Utils.matToTexture2D(dstMat, dst_t2d);
    Sprite dst_sp = Sprite.Create(dst_t2d, new UnityEngine.Rect(0, 0, dst_t2d.width, dst_t2d.height), Vector2.zero);
    m_dstImage.sprite = dst_sp;
    m_dstImage.preserveAspect = true;
    m_dstImage.rectTransform.offsetMin = new Vector2(0, 0);
    m_dstImage.rectTransform.offsetMax = new Vector2(dst_t2d.width, dst_t2d.height);
    m_dstImage.rectTransform.anchoredPosition = Vector2.zero;
}
// Update is called once per frame.
// Runs Canny over the IR texture and publishes the edge image as an R8 texture.
void Update()
{
    Mat irMat = Util.toMat((Texture2D)IRtexture, CvType.CV_8UC3);

    Mat edges = new Mat();
    Imgproc.Canny(irMat, edges, threshold1, threshold2);

    Texture2D cannyTexture = (Texture2D)Util.toTexture(edges, TextureFormat.R8);
    textureBinding.Invoke(cannyTexture);
}
// Lane-detection pipeline: gray -> Gaussian blur -> Canny -> region of
// interest -> probabilistic Hough lines, blended back over the original.
// Each intermediate stage is displayed on quad_1..quad_6.
private void Start()
{
    // Every time the image is modified through Imgproc it comes out flipped;
    // Core.flip(inputMat, outputMat, 0) can restore the original orientation.
    originMat = new Mat(roadTexture.height, roadTexture.width, CvType.CV_8UC4);
    Utils.texture2DToMat(roadTexture, originMat);
    DrawMat(quad_1, originMat);
    Debug.Log("LOAD " + roadTexture.width.ToString() + "x" + roadTexture.height.ToString() + " :: roadTexture");

    outputMat = originMat.clone();
    inputMat = originMat.clone();

    // Original -> grayscale
    grayMat = new Mat();
    Imgproc.cvtColor(inputMat, grayMat, Imgproc.COLOR_BGR2GRAY);
    DrawMat(quad_2, grayMat);

    // Grayscale -> Gaussian-blurred grayscale
    // (gaussianMat.size() is 0x0 here — presumably relying on OpenCV deriving
    // the kernel size from sigma = 2 when ksize is zero; confirm intent.)
    gaussianMat = new Mat();
    Imgproc.GaussianBlur(grayMat, gaussianMat, gaussianMat.size(), 2, 2);
    DrawMat(quad_3, gaussianMat);

    // Blurred grayscale -> edge map
    contourMat = new Mat();
    Imgproc.Canny(gaussianMat, contourMat, 50, 200);
    DrawMat(quad_4, contourMat);

    // Edge map -> edges restricted to the region of interest
    regionMat = WriteRegionOfInterest(contourMat);
    DrawMat(quad_5, regionMat);

    // ROI edges -> matrix of line-segment coordinates from the Hough transform
    Mat houghMat = new Mat();
    Imgproc.HoughLinesP(regionMat, houghMat, 2, Mathf.PI / 180, 90, 120, 150);
    Debug.Log(houghMat.dump());

    // Segment matrix -> Mat containing only the detected lines
    Mat lineMat = Mat.zeros(outputMat.rows(), outputMat.cols(), outputMat.type());
    for (int x = 0; x < houghMat.rows(); x++) {
        // Each Hough row holds one segment: (x1, y1, x2, y2).
        Point pt1 = new Point(houghMat.get(x, 0)[0], houghMat.get(x, 0)[1]);
        Point pt2 = new Point(houghMat.get(x, 0)[2], houghMat.get(x, 0)[3]);
        Debug.Log(pt1.ToString() + "/" + pt2.ToString());
        Imgproc.line(lineMat, pt1, pt2, new Scalar(255, 0, 0), 4, Imgproc.LINE_AA, 0);
    }

    // Blend the line-only Mat with the original image.
    Core.addWeighted(lineMat, 0.8, inputMat, 1, 0, outputMat);
    DrawMat(quad_6, outputMat);
}
// Loads `texture` into `mat` (converted to BGRA), applies a 5x5 Gaussian blur,
// and writes the Canny edge image (hysteresis thresholds `min`/`max`) into
// `edge`.
void ProcessImg(Texture2D texture, Mat mat, Mat edge, double min, double max)
{
    Utils.texture2DToMat(texture, mat);
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2BGRA);

    // Smooth before edge detection to suppress pixel noise.
    Imgproc.GaussianBlur(mat, mat, new Size(5, 5), 1.4, 1.4);
    Imgproc.Canny(mat, edge, min, max);

    Debug.Log("Processed!");
}
// Locates the largest blob in `threshold` and stores its centroid in `book`.
// book.isTracked is set only when a blob larger than MIN_OBJECT_AREA exists.
void trackFilteredObject(SpellBook book, Mat threshold)
{
    book.x = -1;
    book.y = -1;
    book.isTracked = false;

    Debug.Log("tracking " + book.color.ToString());

    // Work on a copy so the caller's threshold image stays untouched.
    Mat working = new Mat();
    threshold.copyTo(working);
    Imgproc.Canny(working, working, 50, 100);

    // Output containers for findContours.
    List<MatOfPoint> contours = new List<MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(working, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Nothing detected: leave book untracked.
    if (hierarchy.rows() <= 0)
    {
        return;
    }

    // Keep the centroid of the largest blob above the minimum area; areas are
    // compared via the zeroth image moment.
    double bestArea = MIN_OBJECT_AREA;
    foreach (MatOfPoint contour in contours)
    {
        Moments moment = Imgproc.moments(contour);
        double area = moment.get_m00();
        if (area > bestArea)
        {
            book.x = (int)(moment.get_m10() / area);
            book.y = (int)(moment.get_m01() / area);
            bestArea = area;
        }
    }

    book.isTracked = book.x != -1;
}
///<summary>
/// Detects number of electrons in the texture image and return new texture with
/// contours and out with number of detected electrons
///</summary>
public static Texture2D ApplyScanning(Texture2D srcTexture, int blurSize, out int detectionCount)
{
    Mat srcMat = new Mat(srcTexture.height, srcTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(srcTexture, srcMat);
    Mat destMat = new Mat();
    srcMat.copyTo(destMat);

    // Gray -> box blur -> binary threshold -> Canny edge map.
    Imgproc.cvtColor(destMat, destMat, Imgproc.COLOR_BGR2GRAY);
    // Imgproc.GaussianBlur(destMat,destMat,new Size(5,5) , 1);
    Imgproc.blur(destMat, destMat, new Size(blurSize, blurSize));
    Imgproc.threshold(destMat, destMat, 120, 255, Imgproc.THRESH_BINARY);
    Imgproc.Canny(destMat, destMat, 20, 190);

    List<MatOfPoint> contours = new List<MatOfPoint>();
    Imgproc.findContours(destMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Keep only contours above the minimum area (80 px^2); their count is the
    // detection result.
    List<MatOfPoint> contours_list = new List<MatOfPoint>();
    for (int i = 0; i < contours.Count; i++)
    {
        // if(area > 1000 && area < 3000){ ... }
        if (Imgproc.contourArea(contours[i]) > 80)
        {
            contours_list.Add(contours[i]);
        }
    }
    detectionCount = contours_list.Count;

    // Fix: the original called drawContours with index -1 inside a loop,
    // redrawing the whole list once per contour (accidental O(n^2)). Index -1
    // already draws every contour, so a single call yields the identical image.
    if (contours_list.Count > 0)
    {
        Imgproc.drawContours(srcMat, contours_list, -1, new Scalar(0, 255, 0), 4);
    }

    Texture2D scannedTexture = new Texture2D(srcMat.width(), srcMat.height(), TextureFormat.RGB24, false);
    Utils.matToTexture2D(srcMat, scannedTexture);
    return scannedTexture;
}
// Update is called once per frame.
// Edge-aware depth refinement: Sobel gradients of the color image drive a
// Canny edge map, which guides a domain-transform filter over the 8-bit depth
// image. Publishes the Sobel, Canny, and refined-depth textures via events.
void Update()
{
    Debug.Log("Applying EdgeCleanup to Depth");

    Mat gray = new Mat();
    Imgproc.cvtColor(color, gray, Imgproc.COLOR_RGBA2GRAY);

    // Horizontal/vertical 16-bit derivatives feed the gradient Canny overload.
    Mat sobelX = new Mat();
    Mat sobelY = new Mat();
    Imgproc.Sobel(gray, sobelX, CvType.CV_16S, 1, 0, (int)ksize, sobelScale, 0, Core.BORDER_DEFAULT);
    Imgproc.Sobel(gray, sobelY, CvType.CV_16S, 0, 1, (int)ksize, sobelScale, 0, Core.BORDER_DEFAULT);

    Mat depth = Util.toMat((Texture2D)depthTexture, CvType.CV_8UC3);
    // depthFlipped is declared but unused since the flip below is disabled.
    Mat depthFlipped = new Mat();
    //Core.flip(depthMat, depthFlipped, -1);

    // Compress depth into a single 8-bit channel (scale 0.1).
    Mat depthMat8bit = new Mat();
    depth.convertTo(depthMat8bit, CvType.CV_8UC1, 0.1f);
    //Core.bitwise_not(depthMat8bit,depthMat8bit);
    //Imgproc.equalizeHist(depthMat8bit, depthMat8bit);

    // Canny overload taking precomputed x/y gradients (L2gradient = true).
    Mat canneyRslt = new Mat();
    Imgproc.Canny(sobelX, sobelY, canneyRslt, cannyThreshold1, cannyThreshold2, true);

    // Laplacian is computed but never used below — presumably leftover debug.
    Mat laplacianRslt = new Mat();
    Imgproc.Laplacian(gray, laplacianRslt, CvType.CV_32F, 5, .1, 0);

    // Edge-guided domain-transform smoothing of the 8-bit depth image.
    Mat DTF_NC = new Mat();
    Ximgproc.dtFilter(canneyRslt, depthMat8bit, DTF_NC, sigmaSpacial, sigmaColor, Ximgproc.DTF_NC, dtIter);

    // Publish intermediate and final results as textures.
    Texture2D yTexture = (Texture2D)Util.toTexture(sobelX, TextureFormat.R16);
    sobelTextureEvent.Invoke(yTexture);
    Texture2D canneyTexture = (Texture2D)Util.toTexture(canneyRslt, TextureFormat.R8);
    canneyTextureEvent.Invoke(canneyTexture);
    Texture2D depthtexture = (Texture2D)Util.toTexture(DTF_NC, TextureFormat.R8);
    improvedDepth.Invoke(depthtexture);
}
// Coroutine: captures a screenshot at end of frame, saves the original image,
// converts it to an inverted Canny line drawing, saves that image, and records
// the object's metadata.
IEnumerator CaptureScreenShot()
{
    yield return(new WaitForEndOfFrame());

    ObjectScreenVisible(false);

    // Capture the screen image (2x supersampled).
    Texture2D screenImageTexture = ScreenCapture.CaptureScreenshotAsTexture(2);

    // Save the original image.
    string pathObjectOriginal = GetImagePath(true);
    SaveImage(screenImageTexture, pathObjectOriginal);

    // Convert from a Unity Texture to an OpenCV Mat.
    Mat screenImageMat = new Mat(screenImageTexture.height, screenImageTexture.width, CvType.CV_8UC4);
    Utils.texture2DToMat(screenImageTexture, screenImageMat);

    // Convert to grayscale.
    Mat screenImageGrayMat = new Mat(screenImageMat.rows(), screenImageMat.cols(), CvType.CV_8UC4);
    Imgproc.cvtColor(screenImageMat, screenImageGrayMat, Imgproc.COLOR_RGBA2GRAY);

    // Use the Canny filter to identify the edges.
    Mat resultCannyMat = new Mat();
    Imgproc.Canny(screenImageGrayMat, resultCannyMat, 500, 600);

    // Invert the colors (dark lines on a light background).
    Mat resultInvertMat = new Mat(resultCannyMat.rows(), resultCannyMat.cols(), CvType.CV_8UC4);
    Core.bitwise_not(resultCannyMat, resultInvertMat);

    // Convert the Mat back into a Unity Texture.
    Texture2D resultCannyTexture = new Texture2D(resultInvertMat.cols(), resultInvertMat.rows(), TextureFormat.ARGB32, false);
    Utils.matToTexture2D(resultInvertMat, resultCannyTexture);

    // Save the line-drawing image to be detected later.
    PropertiesModel.PathObjectDrawing = GetImagePath(false);
    SaveImage(resultCannyTexture, PropertiesModel.PathObjectDrawing);
    SaveInformationObject();

    // Release the temporary textures.
    Destroy(resultCannyTexture);
    Destroy(screenImageTexture);
}
// Per-frame pipeline: align game gravity with the device, grab the camera
// frame, convert it to gray, run Canny, then refresh the detectors.
void Update()
{
    // Initialization is not yet complete.
    if (rgbaMat == null)
    {
        return;
    }

    // Align the game-world gravity to real-world gravity.
    if (gyro != null)
    {
        Vector3 gravity = gyro.gravity;
        gravity.z = 0f;
        Physics.gravity = gravityMagnitude * gravity.normalized;
    }

    // No new frame is ready.
    if (!webCamTexture.didUpdateThisFrame)
    {
        return;
    }

    // No new detection results are needed while simulating.
    if (simulating)
    {
        return;
    }

    // Pull the camera frame into OpenCV's RGBA format.
    Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

    // Gray conversion and Canny edge extraction feed the detectors below.
    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
    Imgproc.Canny(grayMat, cannyMat, 50.0, 200.0);

    UpdateCircles();
    UpdateLines();
}
// Canny filter: returns a sprite showing the edges of grayMat.
public Sprite CannyGradient()
{
    // Smooth first so Canny sees less pixel noise.
    Mat edges = new Mat();
    Imgproc.blur(grayMat, edges, new Size(3, 3));

    // Hysteresis thresholds: low = 0, high = 100.
    double lowThreshold = 0;
    double highThreshold = 100;
    Imgproc.Canny(edges, edges, lowThreshold, highThreshold);
    Core.convertScaleAbs(edges, dstMat);

    // Mat -> Texture2D -> Sprite
    Texture2D tex = new Texture2D(dstMat.cols(), dstMat.rows());
    Utils.matToTexture2D(dstMat, tex);
    return Sprite.Create(tex, new UnityEngine.Rect(0, 0, tex.width, tex.height), Vector2.zero);
}
// Per-frame pipeline: align gravity, grab the camera frame, convert to gray,
// run Canny, equalize, then refresh the circle/line detectors.
void Update()
{
    if (rgbaMat == null) {
        // Initialization is not yet complete.
        return;
    }
    if (gyro != null) {
        //get games gravity and and convert it into real world gravity
        Vector3 gravity = gyro.gravity;
        gravity.z = 0f;
        gravity = gravityMagnitude * gravity.normalized;
        Physics.gravity = gravity;
    }
    if (!webCamTexture.didUpdateThisFrame) {
        // No new frame is ready.
        return;
    }
    if (simulating) {
        // No new detection results are needed.
        return;
    }

    // Convert the RGBA image to open cv format
    Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

    // Convert the OpenCV image to gray scale
    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
    Imgproc.Canny(grayMat, cannyMat, 50.0, 200.0);
    // NOTE(review): equalizeHist runs AFTER Canny, so cannyMat is computed on
    // the un-equalized gray image and only later consumers of grayMat see the
    // equalized data — confirm this ordering is intentional.
    Imgproc.equalizeHist(grayMat, grayMat);

    UpdateCircles();
    UpdateLines();
}
// Finds straight line segments in baseTexture and renders them (in red) over
// the grayscale image shown on targetRawImage.
void FindLines()
{
    // Texture2D -> Mat
    Mat sourceMat = new Mat(baseTexture.height, baseTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(baseTexture, sourceMat);

    // Grayscale working copy.
    Mat grayMat = new Mat();
    sourceMat.copyTo(grayMat);
    Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    // Canny edge map feeds the Hough transform.
    Mat edges = new Mat();
    Imgproc.Canny(grayMat, edges, 50, 200);

    // Probabilistic Hough: 1 px / 1 degree resolution, 75-vote threshold.
    Mat lines = new Mat(edges.rows(), edges.cols(), CvType.CV_32SC4);
    int minLineLength = 50;
    int maxLineGap = 150;
    Imgproc.HoughLinesP(edges, lines, 1, Mathf.PI / 180, 75, minLineLength, maxLineGap);

    Debug.Log("lines.toString() " + lines.ToString());
    Debug.Log("lines.dump()" + lines.dump());

    // Each detected segment is 4 ints: x1, y1, x2, y2.
    int[] segments = new int[lines.cols() * lines.rows() * lines.channels()];
    lines.get(0, 0, segments);
    for (int i = 0; i < segments.Length; i += 4)
    {
        Point from = new Point(segments[i + 0], segments[i + 1]);
        Point to = new Point(segments[i + 2], segments[i + 3]);
        Imgproc.line(grayMat, from, to, new Scalar(255, 0, 0), 2);
    }

    // Mat -> Texture2D for display.
    Texture2D finalTexture = new Texture2D(grayMat.cols(), grayMat.rows(), TextureFormat.RGB24, false);
    Utils.matToTexture2D(grayMat, finalTexture);
    targetRawImage.texture = finalTexture;
}
// Per-frame "comic/sketch" filter (runs only once initialization completed):
// posterizes the gray camera image into three tone bands, composites them
// over a background Mat, then overlays inverted Canny edge lines.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame() && initialized) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        // Start from the background image each frame.
        bgMat.copyTo(dstMat);

        // Smooth a copy for edge detection so posterization below does not
        // feed hard steps into Canny.
        Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);

        grayMat.get(0, 0, grayPixels);

        // Posterize: <70 -> black, 70..119 -> mid gray (mask = 0, background
        // shows through), >=120 -> white. maskPixels marks which pixels
        // overwrite the background in the masked copy below.
        for (int i = 0; i < grayPixels.Length; i++) {
            maskPixels [i] = 0;
            if (grayPixels [i] < 70) {
                grayPixels [i] = 0;
                maskPixels [i] = 1;
            } else if (70 <= grayPixels [i] && grayPixels [i] < 120) {
                grayPixels [i] = 100;
            } else {
                grayPixels [i] = 255;
                maskPixels [i] = 1;
            }
        }

        grayMat.put(0, 0, grayPixels);
        maskMat.put(0, 0, maskPixels);
        grayMat.copyTo(dstMat, maskMat);

        // Edge lines: detect, use the edge map as the mask, then invert the
        // lines so they render dark over the composited result.
        Imgproc.Canny(lineMat, lineMat, 20, 120);
        lineMat.copyTo(maskMat);
        Core.bitwise_not(lineMat, lineMat);
        lineMat.copyTo(dstMat, maskMat);

        Utils.matToTexture2D(dstMat, texture); //, webCamTextureToMatHelper.GetBufferColors());
    }
}
// Use this for initialization.
// Loads the chessboard texture, runs Canny + probabilistic Hough line
// detection, draws the detected segments in red, and shows the result on this
// GameObject's material.
void Start()
{
    // Load the chessboard texture into an RGB Mat.
    Texture2D imgTexture = Resources.Load("chessboard") as Texture2D;
    Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, imgMat);
    Debug.Log("imgMat dst ToString " + imgMat.ToString());

    // Gray + Canny prepares the edge map for the Hough transform.
    Mat grayMat = new Mat();
    Imgproc.cvtColor(imgMat, grayMat, Imgproc.COLOR_RGB2GRAY);
    Imgproc.Canny(grayMat, grayMat, 50, 200);

    // Probabilistic Hough line detection.
    Mat lines = new Mat();
    Imgproc.HoughLinesP(grayMat, lines, 1, Mathf.PI / 180, 50, 50, 10);
    // Debug.Log ("lines toStirng " + lines.ToString ());
    // Debug.Log ("lines dump" + lines.dump ());

    // Draw every detected segment (x1, y1, x2, y2 per entry) in red.
    int[] segments = new int[lines.cols() * lines.rows() * lines.channels()];
    lines.get(0, 0, segments);
    for (int i = 0; i < segments.Length; i += 4)
    {
        Core.line(imgMat, new Point(segments [i + 0], segments [i + 1]), new Point(segments [i + 2], segments [i + 3]), new Scalar(255, 0, 0), 2);
    }

    // Show the annotated image on this GameObject's material.
    Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(imgMat, texture);
    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
}
// Returns the external contours of `newImage` whose area exceeds 500.
private List <MatOfPoint> GetContours(Mat newImage)
{
    // Blur the source in place, then edge-detect into a separate Mat.
    Imgproc.GaussianBlur(newImage, newImage, new Size(3, 3), 1);

    Mat edges = new Mat();
    Imgproc.Canny(newImage, edges, 75, 255);

    // NOTE(review): a 1x1 structuring element makes this dilate a no-op even
    // with 2 iterations; a 3x3 kernel was probably intended — confirm before
    // changing, as a larger kernel alters the contour output.
    Imgproc.dilate(edges, edges, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(1, 1)), new Point(-1, -1), 2);

    List <MatOfPoint> contours = new List <MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(edges, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Keep only the large contours.
    List <MatOfPoint> bigContours = new List <MatOfPoint>();
    foreach (var contour in contours)
    {
        if (Imgproc.contourArea(contour) > 500)
        {
            bigContours.Add(contour);
        }
    }
    return bigContours;
}
// Applies the "comic/sketch" filter to a raw RGBA framebuffer: posterizes the
// gray image into three tone bands over a background Mat, overlays inverted
// Canny edge lines, and pushes the result into `texture`.
private void updateFilter(byte[] framebuffer)
{
    // Load the raw RGBA bytes into the working Mat.
    rgbaMat.put(0, 0, framebuffer);
    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

    // Start from the background image each frame.
    bgMat.copyTo(dstMat);

    // Smooth a copy for edge detection.
    Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);

    grayMat.get(0, 0, grayPixels);

    // Posterize: <70 -> black, 70..119 -> mid gray (mask = 0, background shows
    // through), >=120 -> white. maskPixels marks which pixels overwrite the
    // background in the masked copy below.
    for (int i = 0; i < grayPixels.Length; i++) {
        maskPixels [i] = 0;
        if (grayPixels [i] < 70) {
            grayPixels [i] = 0;
            maskPixels [i] = 1;
        } else if (70 <= grayPixels [i] && grayPixels [i] < 120) {
            grayPixels [i] = 100;
        } else {
            grayPixels [i] = 255;
            maskPixels [i] = 1;
        }
    }

    grayMat.put(0, 0, grayPixels);
    maskMat.put(0, 0, maskPixels);
    grayMat.copyTo(dstMat, maskMat);

    // Edge lines: detect, use the edge map as the mask, then invert the lines
    // so they render dark over the composited result.
    Imgproc.Canny(lineMat, lineMat, 20, 120);
    lineMat.copyTo(maskMat);
    Core.bitwise_not(lineMat, lineMat);
    lineMat.copyTo(dstMat, maskMat);

    Utils.matToTexture2D(dstMat, texture);
}
// Runs Canny and HoughCircles over the greyscale camera buffer; exposes the
// edge image via outMat and logs the first detected circle's center/radius.
void ComputerVisionAlgo(IntPtr greyscale)
{
    Utils.copyToMat(greyscale, imageMat);

    // Imgproc.threshold(imageMat, outMat, 128, 255, Imgproc.THRESH_BINARY_INV);
    Imgproc.Canny(imageMat, edgeMat, 90, 150);
    outMat = edgeMat;

    // HOUGH_GRADIENT runs on the raw gray image (the method applies its own
    // internal edge detection); dp = 1, minDist = 20 px between centers.
    Imgproc.HoughCircles(imageMat, circMat, Imgproc.HOUGH_GRADIENT, 1.0, 20.0);
    Debug.LogFormat("Circle Metadata {0}", circMat.ToString());
    Debug.Log(circMat.size());

    if (circMat.size() == null_size) {
        Debug.Log("No circles found");
    } else {
        // Each circle entry is (center_x, center_y, radius).
        double[] c_data = circMat.get(0, 0);
        Debug.LogFormat("Circle Center: {0} x {1} \n Circle Radius: {2}", c_data[0], c_data[1], c_data[2]);
    }
    Debug.Log(circMat.size().width);

    // Debug.LogFormat("Circle 1: {0} x {1} -- {2}",
    //     circMat.get(0, 0)[0], circMat.get(0, 1)[0], circMat.get(0, 2)[0]);
    // for (int i = 0; i < 5; i++)
    // {
    //     Point center = Point(circMat[i][0], circMat[i][1]);
    //     int radius = circMat[i][2];
    //     circle(imageMat, center, 3, Scalar(0, 255, 0), -1, 8);
    //     circle(imageMat, center, radius, Scalar(0, 0, 255), 3, 8);
    // }
    // Debug.LogFormat("Mat Dimensions: {0} x {1}", imageMat.cols(), imageMat.rows());
}
// Runs gray -> blur -> Canny -> morphological close over originalImage and
// returns the external contours of the closed edge map.
private IList <MatOfPoint> ProcessImage()
{
    Mat grayMat = new Mat();
    Mat blurMat = new Mat();
    Mat edgesMat = new Mat();
    Mat closedMat = new Mat();
    Mat hierarchy = new Mat();
    IList <MatOfPoint> contours = new JavaList <MatOfPoint>();

    // Bitmap -> Mat; the Bitmap is no longer needed afterwards.
    OpenCV.Android.Utils.BitmapToMat(originalImage, originalMat);
    originalImage.Dispose();

    Imgproc.CvtColor(originalMat, grayMat, Imgproc.ColorBgr2gray);
    Imgproc.GaussianBlur(grayMat, blurMat, new OpenCV.Core.Size(3, 3), 0);
    Imgproc.Canny(blurMat, edgesMat, 10, 250);

    // Close small gaps in the edge map so contours form closed shapes.
    Mat kernel = Imgproc.GetStructuringElement(Imgproc.MorphRect, new Size(3, 3));
    Imgproc.MorphologyEx(edgesMat, closedMat, Imgproc.MorphClose, kernel);

    Imgproc.FindContours(closedMat, contours, hierarchy, Imgproc.RetrExternal, Imgproc.ChainApproxSimple);
    return contours;
}
// findContours-based segmentation: edge-detects `original_image` IN PLACE,
// fills the largest contour white, and outlines all contours (largest in
// a thicker stroke). Returns the mutated source Mat.
private static Mat MyFindLargestRectangle(Mat original_image)
{
    Mat imgSource = original_image;
    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY);
    Imgproc.Canny(imgSource, imgSource, 50, 50);
    // NOTE(review): blurring AFTER Canny is unusual — smoothing normally
    // precedes edge detection; confirm the intended order.
    Imgproc.GaussianBlur(imgSource, imgSource, new Size(5, 5), 5);

    List<MatOfPoint> contours = new List<MatOfPoint>();
    Imgproc.findContours(imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // Fix: guard against an empty contour list — `contours[0]` below threw
    // ArgumentOutOfRangeException whenever no contour was detected.
    if (contours.Count == 0)
    {
        return imgSource;
    }

    // Track the largest-area contour (requires a >1 px^2 improvement).
    double maxArea = 0;
    int maxAreaIdx = -1;
    MatOfPoint largest_contour = contours[0];
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    for (int idx = 0; idx < contours.Count; idx++)
    {
        MatOfPoint temp_contour = contours[idx];
        double contourarea = Imgproc.contourArea(temp_contour);
        if (contourarea - maxArea > 1)
        {
            maxArea = contourarea;
            largest_contour = temp_contour;
            maxAreaIdx = idx;
            // Polygonal approximation of the current best contour; only the
            // last approxCurve computed here survives the loop (it is unused
            // below — kept for behavioral parity).
            MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray());
            int contourSize = (int)temp_contour.total();
            Imgproc.approxPolyDP(new_mat, approxCurve, contourSize * 0.05, true);
        }
    }

    // Outline everything, fill the largest region, then highlight its outline.
    Imgproc.drawContours(imgSource, contours, -1, new Scalar(255, 0, 0), 1);
    Imgproc.fillConvexPoly(imgSource, largest_contour, new Scalar(255, 255, 255));
    Imgproc.drawContours(imgSource, contours, maxAreaIdx, new Scalar(0, 0, 255), 3);
    return imgSource;
}