void DoProcess()
{
    if (!(owner.Value is OpenCVForUnityPlayMakerActions.Rect))
    {
        LogError("owner is not initialized. Add Action \"newRect\".");
        return;
    }

    OpenCVForUnity.CoreModule.Rect wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject<OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(owner);

    storeResult.Value = wrapped_owner.contains(x.Value, y.Value);

    Fsm.Event(storeResult.Value ? trueEvent : falseEvent);
}
// Update is called once per frame
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        bool result = detector.detect(grayMat, points);

        if (result)
        {
            // Debug.Log (points.dump ());

            float[] points_arr = new float[8];
            points.get(0, 0, points_arr);

            // Whether all points are inside the image area.
            bool decode = true;
            for (int i = 0; i < 8; i = i + 2)
            {
                if (!imageSizeRect.contains(new Point(points_arr[i], points_arr[i + 1])))
                {
                    decode = false;
                    // Debug.Log ("The point exists out of the image area.");
                    break;
                }
            }

            // Draw the QR code contour.
            Imgproc.line(rgbaMat, new Point(points_arr[0], points_arr[1]), new Point(points_arr[2], points_arr[3]), new Scalar(255, 0, 0, 255), 2);
            Imgproc.line(rgbaMat, new Point(points_arr[2], points_arr[3]), new Point(points_arr[4], points_arr[5]), new Scalar(255, 0, 0, 255), 2);
            Imgproc.line(rgbaMat, new Point(points_arr[4], points_arr[5]), new Point(points_arr[6], points_arr[7]), new Scalar(255, 0, 0, 255), 2);
            Imgproc.line(rgbaMat, new Point(points_arr[6], points_arr[7]), new Point(points_arr[0], points_arr[1]), new Scalar(255, 0, 0, 255), 2);

            if (decode)
            {
                string decode_info = detector.decode(grayMat, points);
                // Debug.Log (decode_info);

                Imgproc.putText(rgbaMat, "DECODE INFO: " + decode_info, new Point(5, grayMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
        }

        Utils.fastMatToTexture2D(rgbaMat, texture);
    }
}
// Update is called once per frame
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // Convert the image to grayscale.
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
        {
            // Debug.Log ("detectFace");

            // Equalize the histogram and detect faces.
            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect())
            {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2,
                    0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
                    new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                if (faces.rows() > 0)
                {
                    // Debug.Log ("faces " + faces.dump ());

                    List<OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
                    List<Point[]> pointsList = faceTracker.getPoints();

                    if (isAutoResetMode)
                    {
                        // Add initial face points from MatOfRect.
                        if (pointsList.Count <= 0)
                        {
                            faceTracker.addPoints(faces);
                            // Debug.Log ("reset faces ");
                        }
                        else
                        {
                            for (int i = 0; i < rectsList.Count; i++)
                            {
                                OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(
                                    rectsList[i].x + rectsList[i].width / 3,
                                    rectsList[i].y + rectsList[i].height / 2,
                                    rectsList[i].width / 3,
                                    rectsList[i].height / 3);

                                // Determine whether the nose point is inside trackRect.
                                if (i < pointsList.Count && !trackRect.contains(pointsList[i][67]))
                                {
                                    rectsList.RemoveAt(i);
                                    pointsList.RemoveAt(i);
                                    // Debug.Log ("remove " + i);
                                }

                                Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                            }
                        }
                    }
                    else
                    {
                        faceTracker.addPoints(faces);
                    }

                    // Draw the face rects.
                    for (int i = 0; i < rectsList.Count; i++)
                    {
                        Imgproc.rectangle(rgbaMat, new Point(rectsList[i].x, rectsList[i].y), new Point(rectsList[i].x + rectsList[i].width, rectsList[i].y + rectsList[i].height), new Scalar(255, 0, 0, 255), 2);
                    }
                }
                else
                {
                    if (isAutoResetMode)
                    {
                        faceTracker.reset();

                        rightEye.SetActive(false);
                        leftEye.SetActive(false);
                        head.SetActive(false);
                        mouth.SetActive(false);
                        axes.SetActive(false);
                    }
                }
            }
        }

        // Track the face points. If there are no face points, track() always returns false.
        if (faceTracker.track(grayMat, faceTrackerParams))
        {
            if (isShowingFacePoints)
            {
                faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
            }

            Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

            Point[] points = faceTracker.getPoints()[0];

            if (points.Length > 0)
            {
                // for (int i = 0; i < points.Length; i++)
                // {
                //     Imgproc.putText(rgbaMat, "" + i, new Point(points[i].x, points[i].y), Imgproc.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar(0, 0, 255, 255), 2, Imgproc.LINE_AA, false);
                // }

                imagePoints.fromArray(
                    points[31], // left eye
                    points[36], // right eye
                    points[67], // nose
                    points[48], // left mouth corner
                    points[54]  // right mouth corner
                    // , points[0],  // left ear
                    // points[14]    // right ear
                );

                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                bool isRefresh = false;

                if (tvec.get(2, 0)[0] > 0 && tvec.get(2, 0)[0] < 1200 * ((float)rgbaMat.cols() / (float)webCamTextureToMatHelper.requestedWidth))
                {
                    isRefresh = true;

                    if (oldRvec == null)
                    {
                        oldRvec = new Mat();
                        rvec.copyTo(oldRvec);
                    }
                    if (oldTvec == null)
                    {
                        oldTvec = new Mat();
                        tvec.copyTo(oldTvec);
                    }

                    // Filter rvec noise.
                    using (Mat absDiffRvec = new Mat())
                    {
                        Core.absdiff(rvec, oldRvec, absDiffRvec);
                        // Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                        using (Mat cmpRvec = new Mat())
                        {
                            Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                            if (Core.countNonZero(cmpRvec) > 0)
                            {
                                isRefresh = false;
                            }
                        }
                    }

                    // Filter tvec noise.
                    using (Mat absDiffTvec = new Mat())
                    {
                        Core.absdiff(tvec, oldTvec, absDiffTvec);
                        // Debug.Log ("absDiffTvec " + absDiffTvec.dump());

                        using (Mat cmpTvec = new Mat())
                        {
                            Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                            if (Core.countNonZero(cmpTvec) > 0)
                            {
                                isRefresh = false;
                            }
                        }
                    }
                }

                if (isRefresh)
                {
                    if (isShowingEffects)
                    {
                        rightEye.SetActive(true);
                    }
                    if (isShowingEffects)
                    {
                        leftEye.SetActive(true);
                    }
                    if (isShowingHead)
                    {
                        head.SetActive(true);
                    }
                    if (isShowingAxes)
                    {
                        axes.SetActive(true);
                    }

                    if ((Mathf.Abs((float)(points[48].x - points[56].x)) < Mathf.Abs((float)(points[31].x - points[36].x)) / 2.2
                         && Mathf.Abs((float)(points[51].y - points[57].y)) > Mathf.Abs((float)(points[31].x - points[36].x)) / 2.9)
                        || Mathf.Abs((float)(points[51].y - points[57].y)) > Mathf.Abs((float)(points[31].x - points[36].x)) / 2.7)
                    {
                        if (isShowingEffects)
                        {
                            mouth.SetActive(true);
                        }
                    }
                    else
                    {
                        if (isShowingEffects)
                        {
                            mouth.SetActive(false);
                        }
                    }

                    rvec.copyTo(oldRvec);
                    tvec.copyTo(oldTvec);

                    Calib3d.Rodrigues(rvec, rotM);

                    transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0)[0], (float)rotM.get(0, 1)[0], (float)rotM.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                    transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0)[0], (float)rotM.get(1, 1)[0], (float)rotM.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                    transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0)[0], (float)rotM.get(2, 1)[0], (float)rotM.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                    // Convert from the right-handed coordinate system (OpenCV) to the left-handed one (Unity).
                    ARM = invertYM * transformationM;

                    // Apply the Z-axis inverting matrix.
                    ARM = ARM * invertZM;

                    if (shouldMoveARCamera)
                    {
                        if (ARGameObject != null)
                        {
                            ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                            ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                            ARGameObject.SetActive(true);
                        }
                    }
                    else
                    {
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;

                        if (ARGameObject != null)
                        {
                            ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                            ARGameObject.SetActive(true);
                        }
                    }
                }
            }
        }

        // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.fastMatToTexture2D(rgbaMat, texture);
    }

    if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
    {
        faceTracker.reset();

        if (oldRvec != null)
        {
            oldRvec.Dispose();
            oldRvec = null;
        }
        if (oldTvec != null)
        {
            oldTvec.Dispose();
            oldTvec = null;
        }

        rightEye.SetActive(false);
        leftEye.SetActive(false);
        head.SetActive(false);
        mouth.SetActive(false);
        axes.SetActive(false);
    }
}
// Update is called once per frame
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // Convert the image to grayscale.
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
        {
            // Debug.Log ("detectFace");

            // Equalize the histogram and detect faces.
            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect())
            {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2,
                    0 // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE,
                    new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                if (faces.rows() > 0)
                {
                    // Debug.Log ("faces " + faces.dump ());

                    List<OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
                    List<Point[]> pointsList = faceTracker.getPoints();

                    if (isAutoResetMode)
                    {
                        // Add initial face points from MatOfRect.
                        if (pointsList.Count <= 0)
                        {
                            faceTracker.addPoints(faces);
                            // Debug.Log ("reset faces ");
                        }
                        else
                        {
                            for (int i = 0; i < rectsList.Count; i++)
                            {
                                OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(
                                    rectsList[i].x + rectsList[i].width / 3,
                                    rectsList[i].y + rectsList[i].height / 2,
                                    rectsList[i].width / 3,
                                    rectsList[i].height / 3);

                                // Determine whether the nose point is inside trackRect.
                                if (i < pointsList.Count && !trackRect.contains(pointsList[i][67]))
                                {
                                    rectsList.RemoveAt(i);
                                    pointsList.RemoveAt(i);
                                    // Debug.Log ("remove " + i);
                                }

                                // Rectangle around the face area.
                                Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                            }
                        }
                    }
                    else
                    {
                        faceTracker.addPoints(faces);
                    }

                    // Draw the face rects.
                    for (int i = 0; i < rectsList.Count; i++)
                    {
                        // Rectangle around the face area.
                        Imgproc.rectangle(rgbaMat, new Point(rectsList[i].x, rectsList[i].y), new Point(rectsList[i].x + rectsList[i].width, rectsList[i].y + rectsList[i].height), new Scalar(255, 0, 0, 255), 2);
                    }
                }
                else
                {
                    if (isAutoResetMode)
                    {
                        faceTracker.reset();
                    }
                }
            }
        }

        // Track the face points. If there are no face points, track() always returns false.
        if (faceTracker.track(grayMat, faceTrackerParams))
        {
            // GameObject.FindGameObjectWithTag("left hand").transform.localScale = new Vector3(0.05f, 0.05f, 50);
            // GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0.05f, 0.05f, 50);
            // facecount = 0;

            if (facerec > 15)
            {
                GameObject.FindGameObjectWithTag("left hand").transform.localScale = new Vector3(0.2f, 0.2f, 50);
                GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0.2f, 0.2f, 50);
                facecount = 0;
            }
            else
            {
                facerec++;
            }

            // Uncomment below for a rectangle around the face.
            // faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
        }
        else
        {
            // facecount prevents the hands from flickering when face recognition is poor.
            if (facecount > 15)
            {
                facerec = 0;
                GameObject.FindGameObjectWithTag("left hand").transform.localScale = new Vector3(0f, 0f, 0);
                GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0f, 0f, 0);
                facecount++;
            }
            else
            {
                facecount++;
            }
        }

        // Imgproc.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.fastMatToTexture2D(rgbaMat, texture);
    }

    // The face tracker resets on a screen tap or the space bar.
    if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
    {
        faceTracker.reset();
    }

    if (Input.GetKeyDown(KeyCode.Escape))
    {
        if (SpeechRecognizer.IsRecording())
        {
            SpeechRecognizer.StopIfRecording();
            // resultText.text = "I stopped recording";
        }

        Application.Quit();
        // Application.LoadLevel ("MainActivity.class");
    }
}
// Update is called once per frame
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // Convert the image to grayscale.
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
        {
            // Debug.Log ("detectFace");

            // Equalize the histogram and detect faces.
            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect())
            {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2,
                    0 // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE,
                    new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                if (faces.rows() > 0)
                {
                    // Debug.Log ("faces " + faces.dump ());

                    List<OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
                    List<Point[]> pointsList = faceTracker.getPoints();

                    if (isAutoResetMode)
                    {
                        // Add initial face points from MatOfRect.
                        if (pointsList.Count <= 0)
                        {
                            faceTracker.addPoints(faces);
                            // Debug.Log ("reset faces ");
                        }
                        else
                        {
                            for (int i = 0; i < rectsList.Count; i++)
                            {
                                OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(
                                    rectsList[i].x + rectsList[i].width / 3,
                                    rectsList[i].y + rectsList[i].height / 2,
                                    rectsList[i].width / 3,
                                    rectsList[i].height / 3);

                                // Determine whether the nose point is inside trackRect.
                                if (i < pointsList.Count && !trackRect.contains(pointsList[i][67]))
                                {
                                    rectsList.RemoveAt(i);
                                    pointsList.RemoveAt(i);
                                    // Debug.Log ("remove " + i);
                                }

                                Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                            }
                        }
                    }
                    else
                    {
                        faceTracker.addPoints(faces);
                    }

                    // Draw the face rects.
                    for (int i = 0; i < rectsList.Count; i++)
                    {
                        Imgproc.rectangle(rgbaMat, new Point(rectsList[i].x, rectsList[i].y), new Point(rectsList[i].x + rectsList[i].width, rectsList[i].y + rectsList[i].height), new Scalar(255, 0, 0, 255), 2);
                    }
                }
                else
                {
                    if (isAutoResetMode)
                    {
                        faceTracker.reset();
                    }
                }
            }
        }

        // Track the face points. If there are no face points, track() always returns false.
        if (faceTracker.track(grayMat, faceTrackerParams))
        {
            faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
        }

        Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.fastMatToTexture2D(rgbaMat, texture);
    }

    if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
    {
        faceTracker.reset();
    }
}
public Mat HandProcessing(Mat img)
{
    // Process the image: threshold, then clean up with open/close morphology.
    Mat resultMat = new Mat();
    Imgproc.threshold(img, resultMat, HandThreshold, 255, Imgproc.THRESH_BINARY);

    Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(10, 10));
    Imgproc.morphologyEx(resultMat, resultMat, Imgproc.MORPH_OPEN, kernel);
    Imgproc.morphologyEx(resultMat, resultMat, Imgproc.MORPH_CLOSE, kernel);

    // Button areas.
    OpenCVForUnity.CoreModule.Rect RectButton_2 = new OpenCVForUnity.CoreModule.Rect(new Point(180, 400), new Point(360, 550));
    OpenCVForUnity.CoreModule.Rect RectButton_3 = new OpenCVForUnity.CoreModule.Rect(new Point(400, 400), new Point(580, 550));
    OpenCVForUnity.CoreModule.Rect RectButton_1 = new OpenCVForUnity.CoreModule.Rect(new Point(180, 170), new Point(360, 320));
    OpenCVForUnity.CoreModule.Rect RectButton_4 = new OpenCVForUnity.CoreModule.Rect(new Point(400, 170), new Point(580, 320));

    Imgproc.rectangle(resultMat, new Point(180, 400), new Point(360, 550), new Scalar(255, 0, 0));
    Imgproc.rectangle(resultMat, new Point(400, 400), new Point(580, 550), new Scalar(255, 0, 0));
    Imgproc.rectangle(resultMat, new Point(180, 170), new Point(360, 320), new Scalar(255, 0, 0));
    Imgproc.rectangle(resultMat, new Point(400, 170), new Point(580, 320), new Scalar(255, 0, 0));

    // VirtualKeybd.Keybd_event(27, 0, 0, 0);
    // if (Input.GetKeyDown(KeyCode.Escape)) {
    //     button_1.onClick.Invoke();
    // }

    // Find hand areas.
    List<MatOfPoint> contours_out = new List<MatOfPoint>();
    List<MatOfPoint> contours_all = new List<MatOfPoint>();
    Mat hierarchy_out = new Mat();
    Mat hierarchy_all = new Mat();

    // RETR_EXTERNAL & CHAIN_APPROX_SIMPLE: find only the outer contours.
    Imgproc.findContours(resultMat, contours_out, hierarchy_out, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    // RETR_TREE & CHAIN_APPROX_SIMPLE: find all of the contours.
    Imgproc.findContours(resultMat, contours_all, hierarchy_all, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);

    // If there are any inner contours, remove the matching outer ones from contours_all.
    if (contours_all.Count > contours_out.Count)
    {
        for (int i = 0; i < contours_out.Count; i++)
        {
            Size OutSize = contours_out[i].size();
            for (int j = 0; j < contours_all.Count; j++)
            {
                Size AllSize = contours_all[j].size();
                if (OutSize == AllSize)
                {
                    contours_all.RemoveAt(j);
                }
            }
        }
    }

    MatOfPoint2f contour_2f = new MatOfPoint2f();
    OpenCVForUnity.CoreModule.Rect BoundingRect = new OpenCVForUnity.CoreModule.Rect();
    List<OpenCVForUnity.CoreModule.Rect> TouchRect = new List<OpenCVForUnity.CoreModule.Rect>();

    // Keep the largest contour within the allowed area range.
    int maxArea_i = -1;
    for (int i = 0; i < contours_all.Count; i++)
    {
        double area = Imgproc.contourArea(contours_all[i]);
        if (area > MaxArea && area > MinArea_thres && area < MaxArea_thres)
        {
            MaxArea = area;
            maxArea_i = i;
        }
    }
    MaxArea = 0;

    if (maxArea_i > -1)
    {
        contours_all[maxArea_i].convertTo(contour_2f, CvType.CV_32F);
        BoundingRect = Imgproc.boundingRect((Mat)contour_2f);
        TouchRect.Add(BoundingRect);
        Imgproc.rectangle(resultMat, BoundingRect, new Scalar(0, 0, 0));
    }

    if (TouchRect.Count == 1)
    {
        Point tl = TouchRect[0].tl();
        Point br = TouchRect[0].br();
        Point centerRect = new Point((tl.x + br.x) / 2, (tl.y + br.y) / 2);

        if (RectButton_1.contains(centerRect))
        {
            VirtualKeybd.Keybd_event(65, 0, 0, 0);
            if (Input.GetKey(KeyCode.A))
            {
                button_1.onClick.Invoke();
            }
        }
        else if (RectButton_2.contains(centerRect))
        {
            VirtualKeybd.Keybd_event(66, 0, 0, 0);
            if (Input.GetKey(KeyCode.B))
            {
                button_2.onClick.Invoke();
            }
        }
        else if (RectButton_3.contains(centerRect))
        {
            VirtualKeybd.Keybd_event(67, 0, 0, 0);
            if (Input.GetKey(KeyCode.C))
            {
                button_3.onClick.Invoke();
            }
        }
        else if (RectButton_4.contains(centerRect))
        {
            VirtualKeybd.Keybd_event(68, 0, 0, 0);
            if (Input.GetKey(KeyCode.D))
            {
                button_4.onClick.Invoke();
            }
        }
    }

    // Make the background black so it does not affect the projection.
    Mat BlackImage = new Mat(resultMat.rows(), resultMat.cols(), CvType.CV_8UC1, new Scalar(0, 0, 0));
    // return BlackImage;

    return resultMat;
}
public bool inside(Rect r)
{
    return r.contains(this);
}
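All of the examples above reduce to the same membership test: build a Rect and ask whether a Point (or an x/y pair) falls inside it. As a minimal sketch of that pattern, assuming a project with OpenCV for Unity installed (the IsInsideImage helper name below is hypothetical, not part of the library):

using OpenCVForUnity.CoreModule;

static bool IsInsideImage(Mat img, double x, double y)
{
    // Rect covering the whole image; contains() is inclusive of the top-left edge
    // and exclusive of the bottom-right edge.
    Rect imageRect = new Rect(0, 0, img.cols(), img.rows());
    return imageRect.contains(new Point(x, y));
}

A call such as IsInsideImage(grayMat, points_arr[0], points_arr[1]) would mirror the bounds check used in the QR code example, and Rect.contains(Point) is the counterpart of Point.inside(Rect) shown directly above.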