// Sobel-operator sharpening: overlays the Sobel edge response onto the
// source image and shows the result in m_blurImage.
void OnSobelSharpen(bool value)
{
    m_blurImage.enabled = true;
    if (!value)
    {
        return;
    }

    Mat dstMat = new Mat();
    Mat dst_x = new Mat();
    Mat abs_dst_x = new Mat();
    Mat dst_y = new Mat();
    Mat abs_dst_y = new Mat();

    // Horizontal gradient.
    Imgproc.Sobel(srcMat, dst_x, srcMat.depth(), 1, 0, 3, 1, 0);
    // Vertical gradient.
    Imgproc.Sobel(srcMat, dst_y, srcMat.depth(), 0, 1, 3, 1, 0);
    // Scale the elements first, then take the absolute value.
    Core.convertScaleAbs(dst_x, abs_dst_x);
    Core.convertScaleAbs(dst_y, abs_dst_y);
    // Average the x and y gradients into one edge image.
    Core.addWeighted(abs_dst_x, 0.5d, abs_dst_y, 0.5d, 0, dstMat);
    // Add the edges on top of the original image to sharpen it.
    Core.addWeighted(dstMat, 0.5d, srcMat, 1d, 0, dstMat);

    // FIX: fill the texture BEFORE creating/assigning the sprite so the
    // first rendered frame is not blank.
    Texture2D t2d = new Texture2D(dstMat.width(), dstMat.height());
    Utils.matToTexture2D(dstMat, t2d);
    Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    m_blurImage.sprite = sp;
    m_blurImage.preserveAspect = true;

    // FIX: Mats wrap native (unmanaged) memory — release them explicitly
    // instead of leaking until the finalizer runs.
    dstMat.Dispose();
    dst_x.Dispose();
    abs_dst_x.Dispose();
    dst_y.Dispose();
    abs_dst_y.Dispose();
}
// Sobel filter: computes the gradient magnitude of grayMat and returns it
// as a Sprite.
public Sprite SobelGradient()
{
    Mat grad_x = new Mat();
    Mat grad_y = new Mat();
    Mat abs_grad_x = new Mat();
    Mat abs_grad_y = new Mat();

    // Horizontal gradient. CV_16S keeps the sign of the derivative so
    // negative edges are not clipped before convertScaleAbs.
    Imgproc.Sobel(grayMat, grad_x, CvType.CV_16S, 1, 0, 3, 1, 0);
    // Vertical gradient.
    Imgproc.Sobel(grayMat, grad_y, CvType.CV_16S, 0, 1, 3, 1, 0);
    // Absolute value of both directions, scaled back to 8-bit.
    Core.convertScaleAbs(grad_x, abs_grad_x);
    Core.convertScaleAbs(grad_y, abs_grad_y);
    // Blend the two directions 50/50.
    // NOTE(review): gamma is 1 here (adds +1 to every pixel); the canonical
    // OpenCV sample uses 0 — confirm this offset is intentional.
    Core.addWeighted(abs_grad_x, 0.5, abs_grad_y, 0.5, 1, dstMat);

    // Mat -> Texture2D -> Sprite.
    Texture2D t2d = new Texture2D(dstMat.cols(), dstMat.rows());
    Utils.matToTexture2D(dstMat, t2d);
    Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

    // FIX: release the native memory held by the temporary Mats instead of
    // leaking it until garbage collection.
    grad_x.Dispose();
    grad_y.Dispose();
    abs_grad_x.Dispose();
    abs_grad_y.Dispose();

    return (sp);
}
// Downscales `src` to a 256 px working height (keeping aspect ratio), runs
// edge detection (Canny or Sobel depending on `filter_type`), and returns
// the result as a new Bitmap at the resized dimensions.
public static Bitmap getEdgeDetectedImage(this Bitmap src, Filter_Type filter_type)
{
    // Normalize the working size: height 256, width scaled proportionally.
    Bitmap resizedBitmap = Bitmap.CreateScaledBitmap(src, (src.Width * 256) / src.Height, 256, true);
    OpenCV.Core.Mat resizedMat = new OpenCV.Core.Mat();
    OpenCV.Android.Utils.BitmapToMat(resizedBitmap, resizedMat);

    // Light Gaussian blur to suppress noise before edge detection.
    OpenCV.Core.Mat gaussianMat = new OpenCV.Core.Mat();
    Imgproc.GaussianBlur(resizedMat, gaussianMat, new OpenCV.Core.Size(3, 3), 0, 0);

    // Grayscale conversion.
    // FIX: dstCn must be 1 for a *2GRAY conversion — the original passed 2,
    // which is an invalid destination channel count for cvtColor.
    OpenCV.Core.Mat grayMat = new OpenCV.Core.Mat();
    Imgproc.CvtColor(gaussianMat, grayMat, Imgproc.ColorRgba2gray, 1);

    OpenCV.Core.Mat edgeDetectedMat = new OpenCV.Core.Mat();
    if (filter_type == Filter_Type.CANNY)
    {
        Imgproc.Canny(grayMat, edgeDetectedMat, 100, 100);
    }
    else
    {
        // FIX: compute the Sobel response in signed 16-bit. With CV_8U (as
        // before) every negative gradient was clipped to 0, losing half of
        // the edges; ConvertScaleAbs brings the result back to 8-bit.
        OpenCV.Core.Mat sobelMat = new OpenCV.Core.Mat();
        Imgproc.Sobel(grayMat, sobelMat, CvType.Cv16s, 1, 1);
        Core.ConvertScaleAbs(sobelMat, edgeDetectedMat, 6, 1);
    }

    Bitmap resultBitmap = Bitmap.CreateBitmap(resizedBitmap.Width, resizedBitmap.Height, Bitmap.Config.Argb8888);
    OpenCV.Android.Utils.MatToBitmap(edgeDetectedMat, resultBitmap);
    return (resultBitmap);
}
// Per-frame: grayscale + binarize + Sobel the webcam image and show it.
void Update()
{
    // Process only when the webcam delivered a new frame.
    if (hogeWebCamTexture.didUpdateThisFrame)
    {
        // Webcam image -> Mat (RGBA, 8 bits per channel).
        Mat originMat = new Mat(hogeWebCamTexture.height, hogeWebCamTexture.width, CvType.CV_8UC4);
        Utils.webCamTextureToMat(hogeWebCamTexture, originMat);

        // Destination Mat.
        // FIX: the Mat constructor takes (rows, cols) — the original swapped
        // them. cvtColor reallocates dst anyway, so this only fixed the
        // declared size, but it was still wrong.
        Mat changeMat = new Mat(originMat.rows(), originMat.cols(), CvType.CV_8UC4);

        // Grayscale.
        // FIX: the source is RGBA (CV_8UC4), so the conversion code must be
        // COLOR_RGBA2GRAY; COLOR_RGB2GRAY expects a 3-channel source.
        Imgproc.cvtColor(originMat, changeMat, Imgproc.COLOR_RGBA2GRAY);
        // Binarize (tune the threshold values as needed).
        Imgproc.threshold(changeMat, changeMat, 100, 255, Imgproc.THRESH_BINARY);
        // Edge extraction (horizontal Sobel, same depth as the source).
        Imgproc.Sobel(changeMat, changeMat, -1, 1, 0);

        // Mat -> Texture2D.
        Texture2D endTexture = new Texture2D(changeMat.cols(), changeMat.rows(), TextureFormat.RGBA32, false);
        Utils.matToTexture2D(changeMat, endTexture);

        // Attach the texture to the output RawImage.
        HogeOutputRaw.texture = endTexture;

        // FIX: release native Mat memory — this method runs every frame and
        // was leaking two Mats per frame.
        originMat.Dispose();
        changeMat.Dispose();
    }
}
// Refines the depth image of frame `f` by domain-transform filtering it with
// an edge map derived from the RGB image; the result is written into
// f.refinedDepth (and then remapped through a LUT texture).
void domainTransferDepthImage(Frame3DGPU f)
{
    //Utils.setDebugMode(true);
    Debug.Log("Applying EdgeCleanup to Depth");

    // Convert the post-processed RGB texture to a Mat, rotated 180 degrees
    // (flip code -1 = both axes) to match the depth orientation below.
    Mat rgbMat = new Mat();
    Core.flip(Util.toMat(f.postprocessedRGBImage, CvType.CV_8UC3), rgbMat, -1);
    // 16-bit depth straight from the sensor texture.
    Mat depthMat = Util.toMat(f.depthImage, CvType.CV_16UC1);

    // NOTE(review): rgbMat was requested as 3-channel (CV_8UC3) but is
    // converted with COLOR_RGBA2GRAY, which expects a 4-channel source —
    // confirm what Util.toMat actually produces here.
    Mat gray = new Mat();
    Imgproc.cvtColor(rgbMat, gray, Imgproc.COLOR_RGBA2GRAY);

    // Signed 16-bit Sobel gradients; passed to Canny below as precomputed
    // dx/dy images.
    Mat sobelX = new Mat();
    Mat sobelY = new Mat();
    Imgproc.Sobel(gray, sobelX, CvType.CV_16S, 1, 0, (int)ksize, sobelScale, 0, Core.BORDER_DEFAULT);
    Imgproc.Sobel(gray, sobelY, CvType.CV_16S, 0, 1, (int)ksize, sobelScale, 0, Core.BORDER_DEFAULT);

    // Compress 16-bit depth into 8 bits (scale 0.03) and invert it.
    Mat depthMat8bit = new Mat();
    depthMat.convertTo(depthMat8bit, CvType.CV_8UC1, 0.03f);
    Core.bitwise_not(depthMat8bit, depthMat8bit);
    //Imgproc.equalizeHist(depthMat8bit, depthMat8bit);

    // Rotate the depth 180 degrees to line up with the flipped RGB.
    Mat depthFlipped = new Mat();
    Core.flip(depthMat8bit, depthFlipped, -1);

    // Edge map from the precomputed gradients (true = L2 gradient norm).
    Mat canneyRslt = new Mat();
    Imgproc.Canny(sobelX, sobelY, canneyRslt, cannyThreshold1, cannyThreshold2, true);
    //Imgcodecs.imwrite("C:/Users/SIGLab/AppData/LocalLow/Intel/Photo3D/3dImages/" + "depth.png", canneyRslt);

    //415 incomplete depth
    // Crop away the band the (D)415 leaves incomplete, then pad back to full
    // size by replicating the border.
    // NOTE(review): 690/1190 and 720/1280 are hard-coded for a 1280x720
    // frame — this breaks silently at other resolutions.
    Mat cropped = depthFlipped.submat(0, 690, 0, 1190);
    Core.copyMakeBorder(cropped, depthFlipped, 0, 720 - 690, 0, 1280 - 1190, Core.BORDER_REPLICATE | Core.BORDER_ISOLATED);

    // NOTE(review): laplacianRslt is computed but never used afterwards.
    Mat laplacianRslt = new Mat();
    Imgproc.Laplacian(gray, laplacianRslt, CvType.CV_32F, 5, .1, 0);

    // Domain-transform filter: smooth the flipped depth, guided by the edge
    // map, into f.refinedDepth.
    Ximgproc.dtFilter(canneyRslt, depthFlipped, f.refinedDepth, sigmaSpacial, sigmaColor, Ximgproc.DTF_NC, dtIter);

    // Not working with built solutions, cant figure out why
    // Remap the refined depth through the first channel of the depthRescale
    // LUT texture.
    List <Mat> matList = new List <Mat>();
    Mat depthLUT = Util.toMat(depthRescale, CvType.CV_8UC3);
    Core.split(depthLUT, matList);
    Mat temp = new Mat();
    f.refinedDepth.convertTo(temp, CvType.CV_8UC1);
    Core.LUT(temp, matList[0], f.refinedDepth);
    //Utils.setDebugMode(false);
}
// Per-frame: computes Sobel gradients of the stereo IR pair, runs DIS
// optical flow on the 8-bit gradient images, and broadcasts the x/y
// gradient and flow-plane textures via UnityEvents.
void Update()
{
    Mat LeftIRMat = Util.toMat((Texture2D)LeftIrTexture, CvType.CV_8UC1);
    // NOTE(review): this also reads LeftIrTexture — almost certainly meant
    // to read the right-IR texture. The correct field name is not visible
    // here; confirm and fix.
    Mat RightIRMat = Util.toMat((Texture2D)LeftIrTexture, CvType.CV_8UC1);

    Mat XLeft = new Mat();
    Mat YLeft = new Mat();
    Mat XRight = new Mat();
    Mat YRight = new Mat();
    // Horizontal/vertical float gradients for each eye.
    Imgproc.Sobel(LeftIRMat, XLeft, CvType.CV_32F, 1, 0, (int)ksize, scale);
    Imgproc.Sobel(LeftIRMat, YLeft, CvType.CV_32F, 0, 1, (int)ksize, scale);
    Imgproc.Sobel(RightIRMat, XRight, CvType.CV_32F, 1, 0, (int)ksize, scale);
    Imgproc.Sobel(RightIRMat, YRight, CvType.CV_32F, 0, 1, (int)ksize, scale);

    Mat x = XLeft + XRight;
    // FIX: the original computed y = XRight + YRight, mixing the horizontal
    // gradient into the vertical sum and leaving YLeft entirely unused.
    Mat y = YLeft + YRight;

    DenseOpticalFlow opticalFlow = DISOpticalFlow.create(DISOpticalFlow.PRESET_MEDIUM);
    //OpenCVForUnity.VideoModule.DenseOpticalFlow opticalFlow = new OpenCVForUnity.VideoModule.DenseOpticalFlow(IntPtr.Zero);

    Texture2D xTexture = (Texture2D)Util.toTexture(x, TextureFormat.RFloat);
    xTextureEvent.Invoke(xTexture);
    Texture2D yTexture = (Texture2D)Util.toTexture(y, TextureFormat.RFloat);
    yTextureEvent.Invoke(yTexture);

    // Compress the float gradients to 8-bit for the flow estimator.
    Mat x8Bit = new Mat();
    Mat y8Bit = new Mat();
    x.convertTo(x8Bit, CvType.CV_8UC1, 0.1f);
    y.convertTo(y8Bit, CvType.CV_8UC1, 0.1f);

    // NOTE(review): flow is computed between the y- and x-gradient images,
    // not between two time-adjacent frames — confirm this is intentional.
    Mat flow = new Mat();
    opticalFlow.calc(y8Bit, x8Bit, flow);

    // Split the 2-channel flow and publish the selected plane.
    List <Mat> planes = new List <Mat> ();
    Core.split(flow, planes);
    Texture2D flowTexture = new Texture2D(LeftIrTexture.width, LeftIrTexture.height, TextureFormat.RFloat, false);
    Utils.fastMatToTexture2D(planes[(int)flowPlane], flowTexture);
    flowTextureEvent.Invoke(flowTexture);

    //Mat canneyRslt = new Mat();
    //Imgproc.Canny(,,IRMat, canneyRslt, threshold1, threshold2);
    //Imgproc.Canny(IRMat, canneyRslt, threshold1, threshold2);
}
// Runs every frame: builds a Canny edge map from the color frame's Sobel
// gradients, uses it to guide a domain-transform filter over the 8-bit
// depth, and publishes the Sobel, Canny and filtered-depth textures.
void Update()
{
    Debug.Log("Applying EdgeCleanup to Depth");

    // Grayscale version of the color frame.
    Mat grayFrame = new Mat();
    Imgproc.cvtColor(color, grayFrame, Imgproc.COLOR_RGBA2GRAY);

    // Signed 16-bit horizontal and vertical gradients.
    Mat gradX = new Mat();
    Mat gradY = new Mat();
    Imgproc.Sobel(grayFrame, gradX, CvType.CV_16S, 1, 0, (int)ksize, sobelScale, 0, Core.BORDER_DEFAULT);
    Imgproc.Sobel(grayFrame, gradY, CvType.CV_16S, 0, 1, (int)ksize, sobelScale, 0, Core.BORDER_DEFAULT);

    // Depth texture as a Mat, squeezed into 8 bits (scale 0.1).
    Mat depthSource = Util.toMat((Texture2D)depthTexture, CvType.CV_8UC3);
    Mat depthFlippedPlaceholder = new Mat();
    //Core.flip(depthMat, depthFlipped, -1);
    Mat depth8Bit = new Mat();
    depthSource.convertTo(depth8Bit, CvType.CV_8UC1, 0.1f);
    //Core.bitwise_not(depthMat8bit,depthMat8bit);
    //Imgproc.equalizeHist(depthMat8bit, depthMat8bit);

    // Canny from the precomputed gradients (true = L2 gradient norm).
    Mat edgeMap = new Mat();
    Imgproc.Canny(gradX, gradY, edgeMap, cannyThreshold1, cannyThreshold2, true);

    // Laplacian response of the gray frame (not consumed below).
    Mat laplacian = new Mat();
    Imgproc.Laplacian(grayFrame, laplacian, CvType.CV_32F, 5, .1, 0);

    // Edge-guided domain-transform filter over the 8-bit depth.
    Mat filteredDepth = new Mat();
    Ximgproc.dtFilter(edgeMap, depth8Bit, filteredDepth, sigmaSpacial, sigmaColor, Ximgproc.DTF_NC, dtIter);

    // Publish results through the UnityEvents.
    Texture2D sobelOut = (Texture2D)Util.toTexture(gradX, TextureFormat.R16);
    sobelTextureEvent.Invoke(sobelOut);

    Texture2D edgeOut = (Texture2D)Util.toTexture(edgeMap, TextureFormat.R8);
    canneyTextureEvent.Invoke(edgeOut);

    Texture2D depthOut = (Texture2D)Util.toTexture(filteredDepth, TextureFormat.R8);
    improvedDepth.Invoke(depthOut);
}
// Per-frame camera callback: applies the effect selected by
// ImageManipulationsActivity.viewMode to the RGBA frame and returns it.
public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
{
    Mat rgba = inputFrame.Rgba();
    Size sizeRgba = rgba.Size();

    Mat rgbaInnerWindow;

    int rows = (int)sizeRgba.Height;
    int cols = (int)sizeRgba.Width;

    // "Inner window": the centered 3/4 x 3/4 region most effects act on.
    int left = cols / 8;
    int top = rows / 8;
    int width = cols * 3 / 4;
    int height = rows * 3 / 4;

    switch (ImageManipulationsActivity.viewMode)
    {
    case ImageManipulationsActivity.VIEW_MODE_RGBA:
        // Pass-through: show the raw camera frame unchanged.
        break;

    case ImageManipulationsActivity.VIEW_MODE_HIST:
        // Draw RGB, Value and Hue histograms as bar charts over the frame.
        Mat hist = new Mat();
        // Bar thickness, capped at 5 px. (sic: "thikness" is the original name.)
        int thikness = (int)(sizeRgba.Width / (mHistSizeNum + 10) / 5);
        if (thikness > 5)
        {
            thikness = 5;
        }
        // Left margin that centers the five histogram groups horizontally.
        int offset = (int)((sizeRgba.Width - (5 * mHistSizeNum + 4 * 10) * thikness) / 2);

        // RGB: one histogram per channel, drawn in that channel's color.
        for (int c = 0; c < 3; c++)
        {
            Imgproc.CalcHist(Arrays.AsList(rgba).Cast <Mat>().ToList(), mChannels[c], mMat0, hist, mHistSize, mRanges);
            // Scale bins so the tallest bar is half the frame height.
            Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
            hist.Get(0, 0, mBuff);
            for (int h = 0; h < mHistSizeNum; h++)
            {
                mP1.X = mP2.X = offset + (c * (mHistSizeNum + 10) + h) * thikness;
                mP1.Y = sizeRgba.Height - 1;
                mP2.Y = mP1.Y - 2 - (int)mBuff[h];
                Imgproc.Line(rgba, mP1, mP2, mColorsRGB[c], thikness);
            }
        }

        // Convert once to HSV (full-range hue) for the Value/Hue histograms.
        Imgproc.CvtColor(rgba, mIntermediateMat, Imgproc.ColorRgb2hsvFull);

        // Value histogram (channel 2), drawn in white.
        Imgproc.CalcHist(Arrays.AsList(mIntermediateMat).Cast <Mat>().ToList(), mChannels[2], mMat0, hist, mHistSize, mRanges);
        Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
        hist.Get(0, 0, mBuff);
        for (int h = 0; h < mHistSizeNum; h++)
        {
            mP1.X = mP2.X = offset + (3 * (mHistSizeNum + 10) + h) * thikness;
            mP1.Y = sizeRgba.Height - 1;
            mP2.Y = mP1.Y - 2 - (int)mBuff[h];
            // (sic: "mWhilte" is the original field name.)
            Imgproc.Line(rgba, mP1, mP2, mWhilte, thikness);
        }

        // Hue histogram (channel 0), each bar tinted with its own hue.
        Imgproc.CalcHist(Arrays.AsList(mIntermediateMat).Cast <Mat>().ToList(), mChannels[0], mMat0, hist, mHistSize, mRanges);
        Core.Normalize(hist, hist, sizeRgba.Height / 2, 0, Core.NormInf);
        hist.Get(0, 0, mBuff);
        for (int h = 0; h < mHistSizeNum; h++)
        {
            mP1.X = mP2.X = offset + (4 * (mHistSizeNum + 10) + h) * thikness;
            mP1.Y = sizeRgba.Height - 1;
            mP2.Y = mP1.Y - 2 - (int)mBuff[h];
            Imgproc.Line(rgba, mP1, mP2, mColorsHue[h], thikness);
        }
        break;

    case ImageManipulationsActivity.VIEW_MODE_CANNY:
        // Canny edges inside the inner window, written back in place.
        rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
        Imgproc.Canny(rgbaInnerWindow, mIntermediateMat, 80, 90);
        Imgproc.CvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.ColorGray2bgra, 4);
        rgbaInnerWindow.Release();
        break;

    case ImageManipulationsActivity.VIEW_MODE_SOBEL:
        // Sobel edges on the gray frame's inner window.
        // NOTE(review): Sobel into CV_8U clips negative gradients to 0;
        // a 16-bit depth followed by ConvertScaleAbs would keep both signs.
        Mat gray = inputFrame.Gray();
        Mat grayInnerWindow = gray.Submat(top, top + height, left, left + width);
        rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
        Imgproc.Sobel(grayInnerWindow, mIntermediateMat, CvType.Cv8u, 1, 1);
        Core.ConvertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
        Imgproc.CvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.ColorGray2bgra, 4);
        grayInnerWindow.Release();
        rgbaInnerWindow.Release();
        break;

    case ImageManipulationsActivity.VIEW_MODE_SEPIA:
        // Sepia tint: per-pixel color transform by the sepia kernel.
        rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
        Core.Transform(rgbaInnerWindow, rgbaInnerWindow, mSepiaKernel);
        rgbaInnerWindow.Release();
        break;

    case ImageManipulationsActivity.VIEW_MODE_ZOOM:
        // Magnify the small central region into the top-left corner and
        // outline the source region in red.
        Mat zoomCorner = rgba.Submat(0, rows / 2 - rows / 10, 0, cols / 2 - cols / 10);
        Mat mZoomWindow = rgba.Submat(rows / 2 - 9 * rows / 100, rows / 2 + 9 * rows / 100, cols / 2 - 9 * cols / 100, cols / 2 + 9 * cols / 100);
        Imgproc.Resize(mZoomWindow, zoomCorner, zoomCorner.Size());
        Size wsize = mZoomWindow.Size();
        Imgproc.Rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.Width - 2, wsize.Height - 2), new Scalar(255, 0, 0, 255), 2);
        zoomCorner.Release();
        mZoomWindow.Release();
        break;

    case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
        // Pixelate: shrink to 10% with nearest-neighbor, then blow back up.
        rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
        Imgproc.Resize(rgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.InterNearest);
        Imgproc.Resize(mIntermediateMat, rgbaInnerWindow, rgbaInnerWindow.Size(), 0.0, 0.0, Imgproc.InterNearest);
        rgbaInnerWindow.Release();
        break;

    case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
        /*
         * Imgproc.cvtColor(rgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
         * Imgproc.pyrMeanShiftFiltering(mIntermediateMat, mIntermediateMat, 5, 50);
         * Imgproc.cvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.COLOR_RGB2RGBA);
         */
        // Posterize: black out Canny edges, then quantize colors to 16 levels
        // (divide by 16, multiply back).
        rgbaInnerWindow = rgba.Submat(top, top + height, left, left + width);
        Imgproc.Canny(rgbaInnerWindow, mIntermediateMat, 80, 90);
        rgbaInnerWindow.SetTo(new Scalar(0, 0, 0, 255), mIntermediateMat);
        Core.ConvertScaleAbs(rgbaInnerWindow, mIntermediateMat, 1.0 / 16, 0);
        Core.ConvertScaleAbs(mIntermediateMat, rgbaInnerWindow, 16, 0);
        rgbaInnerWindow.Release();
        break;
    }

    return (rgba);
}