public List<Point[]> detect(Mat im, float scaleFactor, int minNeighbours, OpenCVForUnity.Size minSize)
{
    // convert image to greyscale
    Mat gray = null;
    if (im.channels() == 1) {
        gray = im;
    } else {
        gray = new Mat();
        Imgproc.cvtColor(im, gray, Imgproc.COLOR_RGBA2GRAY);
    }

    using (Mat equalizeHistMat = new Mat())
    using (MatOfRect faces = new MatOfRect()) {
        Imgproc.equalizeHist(gray, equalizeHistMat);

        detector.detectMultiScale(equalizeHistMat, faces, scaleFactor, minNeighbours,
            Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
            minSize, new Size());

        if (faces.rows() < 1) {
            return new List<Point[]>();
        }
        return convertMatOfRectToPoints(faces);
    }
}
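// A minimal usage sketch for detect() (assumed wiring, not from the original source):
// `detector` is expected to be a CascadeClassifier loaded elsewhere, e.g.
//   detector = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
// and the parameter values below are common OpenCV starting points, not project values.
void DetectExample(Mat frame)
{
    List<Point[]> faces = detect(frame, 1.1f, 2, new OpenCVForUnity.Size(30, 30));
    if (faces.Count > 0) {
        Debug.Log("biggest face found, corner count: " + faces[0].Length);
    }
}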
// Array-based variant of calc_peaks: Mat data is copied into float[] buffers up front
// to avoid repeated per-element Mat.get()/put() calls inside the loop.
public Point[] calc_peaks(Mat im, Point[] points, OpenCVForUnity.Size ssize)
{
    int n = points.Length;
    using (Mat pt = (new MatOfPoint2f(points)).reshape(1, 2 * n))
    using (Mat S = calc_simil(pt))
    using (Mat Si = inv_simil(S)) {
        float[] pt_float = new float[pt.total()];
        Utils.copyFromMat<float>(pt, pt_float);
        int pt_cols = pt.cols();

        float[] S_float = new float[S.total()];
        Utils.copyFromMat<float>(S, S_float);
        int S_cols = S.cols();

        float[] A_float = new float[2 * 3];

        Point[] pts = apply_simil(Si, points);

        for (int i = 0; i < n; i++) {
            OpenCVForUnity.Size wsize = new OpenCVForUnity.Size(
                ssize.width + patches[i].patch_size().width,
                ssize.height + patches[i].patch_size().height);

            using (Mat A = new Mat(2, 3, CvType.CV_32F)) {
                int A_cols = A.cols();
                // build the affine warp: rotation/scale from S, translation so the
                // patch window is centred on the current point estimate
                A_float[0] = S_float[0];
                A_float[1] = S_float[1];
                A_float[1 * A_cols] = S_float[1 * S_cols];
                A_float[(1 * A_cols) + 1] = S_float[(1 * S_cols) + 1];
                A_float[2] = (float)(pt_float[(2 * pt_cols) * i] -
                    (A_float[0] * (wsize.width - 1) / 2 + A_float[1] * (wsize.height - 1) / 2));
                A_float[(1 * A_cols) + 2] = (float)(pt_float[((2 * pt_cols) * i) + 1] -
                    (A_float[1 * A_cols] * (wsize.width - 1) / 2 + A_float[(1 * A_cols) + 1] * (wsize.height - 1) / 2));
                Utils.copyToMat(A_float, A);

                using (Mat I = new Mat()) {
                    Imgproc.warpAffine(im, I, A, wsize, Imgproc.INTER_LINEAR + Imgproc.WARP_INVERSE_MAP);

                    using (Mat R = patches[i].calc_response(I, false)) {
                        // shift the point towards the peak of the patch response map
                        Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(R);
                        pts[i].x = pts[i].x + minMaxLocResult.maxLoc.x - 0.5 * ssize.width;
                        pts[i].y = pts[i].y + minMaxLocResult.maxLoc.y - 0.5 * ssize.height;
                    }
                }
            }
        }
        return apply_simil(S, pts);
    }
}
void Awake()
{
    fa = 160; // threshold used by the OpenCV code to separate red from other colours
    mycap = new WebCamTexture(); // initialize the camera
    WebCamDevice[] devices = WebCamTexture.devices; // enumerate the available cameras
    deviceName = devices[0].name; // name of the first camera
    mycap = new WebCamTexture(deviceName, 640, 480); // re-create the camera with an explicit name, width and height
    tu = new Mat(480, 640, CvType.CV_8UC3); // frame image (rows = height, cols = width)
    a = new Mat(480, 640, CvType.CV_8UC3); // frame converted to YUV colour space
    b = new Mat(480, 640, CvType.CV_8UC1); // holds the U channel
    b1 = new Mat(480, 640, CvType.CV_8UC1); // backup copy of b
    c = new List<Mat>(); // list that receives the split colour channels
    size = new Size(50, 50); // target size for the resized image (50x50)
    color = new Scalar(255); // white, used as the drawing colour
    hier = new Mat(); // hierarchy output required by findContours (otherwise unused)
    rect1 = new OpenCVForUnity.Rect(1, 1, 1, 1); // bounding rectangle of the hand contour
    shishi = new Mat(50, 50, CvType.CV_8UC1); // receives the resized copy of the `jie` image
}
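// A follow-up sketch (assumed, not part of the original Awake()): start the camera
// and copy each new frame into `tu`. An RGBA buffer is used for the grab and then
// converted to RGB, since `tu` was created as CV_8UC3.
void Start()
{
    mycap.Play(); // begin capturing
}

void Update()
{
    if (mycap.didUpdateThisFrame) {
        using (Mat rgba = new Mat(480, 640, CvType.CV_8UC4)) {
            Utils.webCamTextureToMat(mycap, rgba);
            Imgproc.cvtColor(rgba, tu, Imgproc.COLOR_RGBA2RGB);
        }
    }
}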
IEnumerator GetPicture()
{
    yield return new WaitForEndOfFrame(); // capture only after the camera frame has finished rendering

    // build point mats for the selected region and its destination rectangle
    OpenCVForUnity.Mat srcMat = new OpenCVForUnity.Mat(4, 1, CvType.CV_32FC2);
    OpenCVForUnity.Mat dstMat = new OpenCVForUnity.Mat(4, 1, CvType.CV_32FC2);
    // NOTE: the original snippet never filled srcMat; the four selected screen-space
    // corners are assumed here, in the same order as the destination points below.
    srcMat.put(0, 0, rectTopLeft.x, rectTopLeft.y,
        rectTopRight.x, rectTopRight.y,
        rectBotLeft.x, rectBotLeft.y,
        rectBotRight.x, rectBotRight.y);
    dstMat.put(0, 0, rectTopLeft.x - rectBotLeft.x, rectTopLeft.y - rectBotLeft.y,
        rectTopRight.x - rectBotLeft.x, rectTopRight.y - rectBotLeft.y,
        rectBotLeft.x - rectBotLeft.x, rectBotLeft.y - rectBotLeft.y,
        rectBotRight.x - rectBotLeft.x, rectBotRight.y - rectBotLeft.y);

    // calculate the perspective transform matrix
    transformMat = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    // grab the rendered frame and crop out the selected region
    Texture2D sourceTex = ScreenCapture.CaptureScreenshotAsTexture();
    Color[] pix = sourceTex.GetPixels((int)rectBotLeft.x, (int)rectBotLeft.y, width, height);
    Texture2D destTex = new Texture2D(width, height);
    destTex.SetPixels(pix);
    destTex.Apply();

    // warp the cropped region into a rectangle
    OpenCVForUnity.Size textureSize = new OpenCVForUnity.Size(width, height);
    OpenCVForUnity.Mat rawImageSrcMat = new OpenCVForUnity.Mat(textureSize, CvType.CV_8UC4);
    OpenCVForUnity.Mat rawImageSrcMatFlip = new OpenCVForUnity.Mat(textureSize, CvType.CV_8UC4);
    Utils.texture2DToMat(destTex, rawImageSrcMat);
    Core.flip(rawImageSrcMat, rawImageSrcMatFlip, 0); // screenshot pixels are bottom-up, so flip vertically
    OpenCVForUnity.Mat rawImageDstMat = new OpenCVForUnity.Mat(textureSize, CvType.CV_8UC4);
    Imgproc.warpPerspective(rawImageSrcMatFlip, rawImageDstMat, transformMat, textureSize);

    // show the result on the RawImage
    texture = new Texture2D(width, height, TextureFormat.RGB24, false);
    Utils.matToTexture2D(rawImageDstMat, texture);
    rawImageRI.texture = texture;
}
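// Triggering the capture (assumed wiring): coroutines must be started through
// StartCoroutine, e.g. from a UI button handler.
public void OnCaptureButtonClicked()
{
    StartCoroutine(GetPicture());
}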
// Same algorithm as the array-based calc_peaks above, written with per-element
// Mat accessors (get/put) instead of float[] buffers.
public Point[] calc_peaks(Mat im, Point[] points, OpenCVForUnity.Size ssize)
{
    int n = points.Length;
    using (Mat pt = (new MatOfPoint2f(points)).reshape(1, 2 * n))
    using (Mat S = calc_simil(pt))
    using (Mat Si = inv_simil(S)) {
        Point[] pts = apply_simil(Si, points);

        for (int i = 0; i < n; i++) {
            OpenCVForUnity.Size wsize = new OpenCVForUnity.Size(
                ssize.width + patches[i].patch_size().width,
                ssize.height + patches[i].patch_size().height);

            using (Mat A = new Mat(2, 3, CvType.CV_32F)) {
                A.put(0, 0, S.get(0, 0)[0]);
                A.put(0, 1, S.get(0, 1)[0]);
                A.put(1, 0, S.get(1, 0)[0]);
                A.put(1, 1, S.get(1, 1)[0]);
                A.put(0, 2, pt.get(2 * i, 0)[0] -
                    (A.get(0, 0)[0] * (wsize.width - 1) / 2 + A.get(0, 1)[0] * (wsize.height - 1) / 2));
                A.put(1, 2, pt.get(2 * i + 1, 0)[0] -
                    (A.get(1, 0)[0] * (wsize.width - 1) / 2 + A.get(1, 1)[0] * (wsize.height - 1) / 2));

                using (Mat I = new Mat()) {
                    Imgproc.warpAffine(im, I, A, wsize, Imgproc.INTER_LINEAR + Imgproc.WARP_INVERSE_MAP);

                    using (Mat R = patches[i].calc_response(I, false)) {
                        Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(R);
                        pts[i].x = pts[i].x + minMaxLocResult.maxLoc.x - 0.5 * ssize.width;
                        pts[i].y = pts[i].y + minMaxLocResult.maxLoc.y - 0.5 * ssize.height;
                    }
                }
            }
        }
        return apply_simil(S, pts);
    }
}
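// Standalone usage sketch for calc_peaks() (illustrative values; in practice it is
// driven by fit() below with the shape model's current estimate):
Point[] RefinePoints(Mat grayFrame, Point[] currentShape)
{
    // search an 11x11 window around each landmark
    return calc_peaks(grayFrame, currentShape, new OpenCVForUnity.Size(11, 11));
}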
private Point[] fit(Mat image, Point[] init, OpenCVForUnity.Size ssize, bool robust, int itol, double ftol)
{
    // precondition: init.Length == n and pmodel.n_patches() == n
    int n = smodel.npts();

    smodel.calc_params(init, new Mat(), 3.0f);
    Point[] pts = smodel.calc_shape();

    // find facial features in the image around the current estimates
    Point[] peaks = pmodel.calc_peaks(image, pts, ssize);

    // optimise
    if (!robust) {
        smodel.calc_params(peaks, new Mat(), 3.0f); // compute shape model parameters
        pts = smodel.calc_shape();                  // update shape
    } else {
        using (Mat weight = new Mat(n, 1, CvType.CV_32F))
        using (Mat weight_sort = new Mat(n, 1, CvType.CV_32F)) {
            float[] weight_float = new float[weight.total()];
            float[] weight_sort_float = new float[weight_sort.total()];

            Point[] pts_old = pts;
            for (int iter = 0; iter < itol; iter++) {
                // compute robust weights from the residual of each point
                for (int i = 0; i < n; i++) {
                    using (MatOfPoint tmpMat = new MatOfPoint(new Point(pts[i].x - peaks[i].x, pts[i].y - peaks[i].y))) {
                        weight_float[i] = (float)Core.norm(tmpMat);
                    }
                }
                Utils.copyToMat(weight_float, weight);

                // robust scale estimate: 1.4826 * median absolute residual
                Core.sort(weight, weight_sort, Core.SORT_EVERY_COLUMN | Core.SORT_ASCENDING);
                Utils.copyFromMat<float>(weight_sort, weight_sort_float);
                double var = 1.4826 * weight_sort_float[n / 2];
                if (var < 0.1) {
                    var = 0.1;
                }

                // Gaussian weighting of the residuals: w = exp(-r^2 / (2*var^2))
                Core.pow(weight, 2, weight);
                Core.multiply(weight, new Scalar(-0.5 / (var * var)), weight);
                Core.exp(weight, weight);

                // compute shape model parameters and update the shape
                smodel.calc_params(peaks, weight, 3.0f);
                pts = smodel.calc_shape();

                // check for convergence: total movement since the last iteration
                float v = 0;
                for (int i = 0; i < n; i++) {
                    using (MatOfPoint tmpMat = new MatOfPoint(new Point(pts[i].x - pts_old[i].x, pts[i].y - pts_old[i].y))) {
                        v += (float)Core.norm(tmpMat);
                    }
                }
                if (v < ftol) {
                    break;
                } else {
                    pts_old = pts;
                }
            }
        }
    }
    return pts;
}
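// Hypothetical per-frame driver for fit(): seed with the shape from the previous
// frame and search a small window around each point. The window size, iteration
// cap, and tolerance below are illustrative values, not taken from this project.
Point[] TrackFrame(Mat grayFrame, Point[] prevShape)
{
    return fit(grayFrame, prevShape, new OpenCVForUnity.Size(11, 11), true, 10, 0.1);
}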