// Use this for initialization
void Start ()
{
    Texture2D imgTexture = Resources.Load ("lena") as Texture2D;

    Mat imgMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    Utils.texture2DToMat (imgTexture, imgMat);
    Debug.Log ("imgMat dst ToString " + imgMat.ToString ());

    //CascadeClassifier cascade = new CascadeClassifier (Utils.getFilePath ("lbpcascade_frontalface.xml"));
    CascadeClassifier cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));

    Mat grayMat = new Mat ();
    Imgproc.cvtColor (imgMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
    Imgproc.equalizeHist (grayMat, grayMat);

    MatOfRect faces = new MatOfRect ();
    if (cascade != null)
        cascade.detectMultiScale (grayMat, faces, 1.1, 2, 2, new Size (20, 20), new Size ());

    OpenCVForUnity.Rect[] rects = faces.toArray ();
    for (int i = 0; i < rects.Length; i++) {
        Debug.Log ("detect faces " + rects [i]);
        Core.rectangle (imgMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
    }

    Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);
    Utils.matToTexture2D (imgMat, texture);

    gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
}
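Utils.getFilePath resolves files placed under Assets/StreamingAssets at runtime; if the XML is missing, the classifier loads empty and detectMultiScale silently returns no detections. A minimal guard, mirroring the cascade.empty () check used in the webcam samples further down:

// Hypothetical guard: an empty classifier usually means the cascade XML
// was not found under Assets/StreamingAssets.
CascadeClassifier cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
if (cascade.empty ()) {
    Debug.LogError ("Cascade file is not loaded. Copy haarcascade_frontalface_alt.xml into Assets/StreamingAssets/.");
    return;
}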
private void DetectInRegion (Mat img, Rect r, List<Rect> detectedObjectsInRegions)
{
    Rect r0 = new Rect (new Point (), img.size ());
    Rect r1 = new Rect (r.x, r.y, r.width, r.height);
    Rect.inflate (r1, (int)((r1.width * coeffTrackingWindowSize) - r1.width) / 2,
        (int)((r1.height * coeffTrackingWindowSize) - r1.height) / 2);
    r1 = Rect.intersect (r0, r1);

    if ((r1.width <= 0) || (r1.height <= 0)) {
        Debug.Log ("DetectionBasedTracker::detectInRegion: Empty intersection");
        return;
    }

    int d = Math.Min (r.width, r.height);
    d = (int)Math.Round (d * coeffObjectSizeToTrack);

    MatOfRect tmpobjects = new MatOfRect ();

    Mat img1 = new Mat (img, r1); // subimage for the rectangle -- without copying data

    cascade.detectMultiScale (img1, tmpobjects, 1.1, 2,
        0 | Objdetect.CASCADE_DO_CANNY_PRUNING | Objdetect.CASCADE_SCALE_IMAGE | Objdetect.CASCADE_FIND_BIGGEST_OBJECT,
        new Size (d, d), new Size ());

    Rect[] tmpobjectsArray = tmpobjects.toArray ();
    int len = tmpobjectsArray.Length;
    for (int i = 0; i < len; i++) {
        Rect tmp = tmpobjectsArray [i];
        Rect curres = new Rect (new Point (tmp.x + r1.x, tmp.y + r1.y), tmp.size ());
        detectedObjectsInRegions.Add (curres);
    }
}
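The inflate call above grows the previous detection rect so that each side becomes coeffTrackingWindowSize times its original length, centered on the old rect, before clipping to the image. A standalone sketch of that arithmetic (the method name is hypothetical):

// Sketch: grow a tracking window about its center. With a 100x80 rect and
// coeff = 2.0, this adds (100*2 - 100)/2 = 50 px per side horizontally and
// (80*2 - 80)/2 = 40 px per side vertically, giving a 200x160 search window.
static OpenCVForUnity.Rect GrowTrackingWindow (OpenCVForUnity.Rect r, float coeff)
{
    int dx = (int)((r.width * coeff) - r.width) / 2;
    int dy = (int)((r.height * coeff) - r.height) / 2;
    return new OpenCVForUnity.Rect (r.x - dx, r.y - dy, r.width + 2 * dx, r.height + 2 * dy);
}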
private IEnumerator init ()
{
    if (webCamTexture != null) {
        faceTracker.reset ();

        webCamTexture.Stop ();
        initDone = false;

        rgbaMat.Dispose ();
        grayMat.Dispose ();
    }

    // Check how many cameras are available on the device, and which ones.
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Start the camera.
    webCamTexture.Play ();

    while (true) {
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1; otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#endif
            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
            if (cascade.empty ()) {
                Debug.LogError ("Cascade file is not loaded. Please copy it from “FaceTrackerSample/StreamingAssets/” to the “Assets/StreamingAssets/” folder.");
            }

            gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

            gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            // bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            Camera.main.orthographicSize = webCamTexture.height / 2;

            initDone = true;

            break;
        } else {
            yield return 0;
        }
    }
}

// Update is called once per frame
void Update ()
{
    if (!initDone)
        return;

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

        // Flip to correct the orientation.
        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                }
            }
        }

        // Convert the image to grayscale.
        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (faceTracker.getPoints ().Count <= 0) {
            Debug.Log ("detectFace");

            // Equalize the histogram and detect faces.
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2,
                    0
                    // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15),
                    new Size ());

                if (faces.rows () > 0) {
                    Debug.Log ("faces " + faces.dump ());

                    // Add initial face points from the MatOfRect.
                    faceTracker.addPoints (faces);

                    // Draw the face rects.
                    OpenCVForUnity.Rect[] rects = faces.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
                        Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                    }
                }
            }
        }

        // Track the face points. If there are no face points, track () always returns false.
        if (faceTracker.track (grayMat, faceTrackerParams)) {
            faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));
        }

        Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

        Utils.matToTexture2D (rgbaMat, texture, colors);
    }

    if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset ();
    }
}

void OnDisable ()
{
    webCamTexture.Stop ();
}

void OnGUI ()
{
    float screenScale = Screen.height / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical ();
    if (GUILayout.Button ("back")) {
        Application.LoadLevel ("FaceTrackerSample");
    }
    if (GUILayout.Button ("change camera")) {
        isFrontFacing = !isFrontFacing;
        StartCoroutine (init ());
    }
    GUILayout.EndVertical ();
}
}
}
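The nested branches in the Update above map webCamTexture.videoRotationAngle and the mirroring flags onto OpenCV flip codes. For reference, Core.flip's flipCode argument works like this:

// Core.flip flipCode semantics:
//   0  -> flip around the x-axis (vertical flip)
//   1  -> flip around the y-axis (horizontal mirror)
//  -1  -> flip around both axes (equivalent to a 180-degree rotation)
Core.flip (rgbaMat, rgbaMat, 0);  // upside down
Core.flip (rgbaMat, rgbaMat, 1);  // mirrored left-right
Core.flip (rgbaMat, rgbaMat, -1); // rotated 180 degrees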
void OnPhotoCaptured (PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    Matrix4x4 cameraToWorldMatrix;
    photoCaptureFrame.TryGetCameraToWorldMatrix (out cameraToWorldMatrix);
    Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

    Matrix4x4 projectionMatrix;
    photoCaptureFrame.TryGetProjectionMatrix (out projectionMatrix);

    photoCaptureFrame.UploadImageDataToTexture (m_Texture);

    Utils.texture2DToMat (m_Texture, rgbaMat);

    Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
    Imgproc.equalizeHist (grayMat, grayMat);

    // Fill all black.
    //Imgproc.rectangle (rgbaMat, new Point (0, 0), new Point (rgbaMat.width (), rgbaMat.height ()), new Scalar (0, 0, 0, 0), -1);
    // Draw edge lines.
    Imgproc.rectangle (rgbaMat, new Point (0, 0), new Point (rgbaMat.width (), rgbaMat.height ()), new Scalar (255, 0, 0, 255), 2);
    // Draw a diagonal line.
    //Imgproc.line (rgbaMat, new Point (0, 0), new Point (rgbaMat.cols (), rgbaMat.rows ()), new Scalar (255, 0, 0, 255));

    if (cascade != null) {
        cascade.detectMultiScale (grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
            new Size (grayMat.cols () * 0.05, grayMat.rows () * 0.05), new Size ());
    }

    OpenCVForUnity.Rect[] rects = faces.toArray ();
    for (int i = 0; i < rects.Length; i++) {
        //Debug.Log ("detect faces " + rects [i]);
        Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
    }

    Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.5, new Scalar (0, 255, 0, 255), 2, Imgproc.LINE_AA, false);

    Utils.matToTexture2D (rgbaMat, m_Texture, colors);

    m_Texture.wrapMode = TextureWrapMode.Clamp;

    m_CanvasRenderer.enabled = true;
    m_CanvasRenderer.sharedMaterial.SetTexture ("_MainTex", m_Texture);
    m_CanvasRenderer.sharedMaterial.SetMatrix ("_WorldToCameraMatrix", worldToCameraMatrix);
    m_CanvasRenderer.sharedMaterial.SetMatrix ("_CameraProjectionMatrix", projectionMatrix);
    m_CanvasRenderer.sharedMaterial.SetFloat ("_VignetteScale", 0.0f);

    // Position the canvas object slightly in front of the real-world web camera.
    Vector3 position = cameraToWorldMatrix.GetColumn (3) - cameraToWorldMatrix.GetColumn (2);

    // Rotate the canvas object so that it faces the user.
    Quaternion rotation = Quaternion.LookRotation (-cameraToWorldMatrix.GetColumn (2), cameraToWorldMatrix.GetColumn (1));

    m_Canvas.transform.position = position;
    m_Canvas.transform.rotation = rotation;

    Debug.Log ("Took picture!");
    Debug.Log ("projectionMatrix:\n" + projectionMatrix.ToString ());
    m_CapturingPhoto = false;
}
// Update is called once per frame
void Update ()
{
    lock (sync) {
        while (ExecuteOnMainThread.Count > 0) {
            ExecuteOnMainThread.Dequeue ().Invoke ();
        }
    }

    if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat (webCamTextureToMatHelper.GetMat ());

        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist (grayMat, grayMat);

        if (enable && !detecting) {
            detecting = true;

            grayMat.copyTo (grayMat4Thread);

            StartThread (ThreadWorker);
        }

        if (!isShowingWebCamImage) {
            // Fill all black.
            Imgproc.rectangle (rgbaMat, new Point (0, 0), new Point (rgbaMat.width (), rgbaMat.height ()), new Scalar (0, 0, 0, 0), -1);
        }

        Rect[] rects;
        if (!isUsingSeparateDetection) {
            if (didUpdateTheDetectionResult) {
                didUpdateTheDetectionResult = false;

                rectangleTracker.UpdateTrackedObjects (detectionResult.toList ());
            }

            rectangleTracker.GetObjects (resultObjects, true);
            rects = resultObjects.ToArray ();
            // rects = rectangleTracker.CreateCorrectionBySpeedOfRects ();

            for (int i = 0; i < rects.Length; i++) {
                //Debug.Log ("detected face[" + i + "] " + rects [i]);
                Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
            }
        } else {
            if (didUpdateTheDetectionResult) {
                didUpdateTheDetectionResult = false;

                //Debug.Log ("process: rectsWhereRegions were taken from detectionResult");
                rectsWhereRegions = detectionResult.toArray ();

                rects = rectsWhereRegions;
                for (int i = 0; i < rects.Length; i++) {
                    Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 0, 255, 255), 1);
                }
            } else {
                //Debug.Log ("process: rectsWhereRegions were taken from previous positions");
                rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects ();

                rects = rectsWhereRegions;
                for (int i = 0; i < rects.Length; i++) {
                    Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 255, 0, 255), 1);
                }
            }

            detectedObjectsInRegions.Clear ();
            if (rectsWhereRegions.Length > 0) {
                int len = rectsWhereRegions.Length;
                for (int i = 0; i < len; i++) {
                    detectInRegion (grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                }
            }

            rectangleTracker.UpdateTrackedObjects (detectedObjectsInRegions);
            rectangleTracker.GetObjects (resultObjects, true);

            rects = resultObjects.ToArray ();
            for (int i = 0; i < rects.Length; i++) {
                //Debug.Log ("detected face[" + i + "] " + rects [i]);
                Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
            }
        }

        Utils.matToTexture2D (rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors ());
    }

    if (webCamTextureToMatHelper.IsPlaying ()) {
        Matrix4x4 cameraToWorldMatrix = Camera.main.cameraToWorldMatrix;
        Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

        texture.wrapMode = TextureWrapMode.Clamp;

        quad_renderer.sharedMaterial.SetMatrix ("_WorldToCameraMatrix", worldToCameraMatrix);

        // Position the canvas object slightly in front of the real-world web camera.
        Vector3 position = cameraToWorldMatrix.GetColumn (3) - cameraToWorldMatrix.GetColumn (2);

        // Rotate the canvas object so that it faces the user.
        Quaternion rotation = Quaternion.LookRotation (-cameraToWorldMatrix.GetColumn (2), cameraToWorldMatrix.GetColumn (1));

        gameObject.transform.position = position;
        gameObject.transform.rotation = rotation;
    }
}
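In the snippet above, column 3 of cameraToWorldMatrix is the camera's world position and column 2 is its backward (+z) axis, so col3 - col2 places the quad one meter in front of the lens. A roughly equivalent formulation written against Camera.main, for comparison (a sketch, not from the sample):

// Sketch: the same placement expressed with Transform properties.
Transform cam = Camera.main.transform;
gameObject.transform.position = cam.position + cam.forward * 1.0f;             // one unit in front
gameObject.transform.rotation = Quaternion.LookRotation (cam.forward, cam.up); // facing along the view axis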
// Update is called once per frame
void Update ()
{
    // Loop playback.
    if (capture.get (Videoio.CAP_PROP_POS_FRAMES) >= capture.get (Videoio.CAP_PROP_FRAME_COUNT))
        capture.set (Videoio.CAP_PROP_POS_FRAMES, 0);

    // Throws the error "PlayerLoop called recursively!" on iOS; WebCamTexture is recommended there instead.
    if (capture.grab ()) {
        capture.retrieve (rgbMat, 0);

        Imgproc.cvtColor (rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
        //Debug.Log ("Mat toString " + rgbMat.ToString ());

        using (HOGDescriptor des = new HOGDescriptor ())
        using (MatOfRect locations = new MatOfRect ())
        using (MatOfDouble weights = new MatOfDouble ()) {
            des.setSVMDetector (HOGDescriptor.getDefaultPeopleDetector ());
            des.detectMultiScale (rgbMat, locations, weights);

            OpenCVForUnity.Rect[] rects = locations.toArray ();
            for (int i = 0; i < rects.Length; i++) {
                // Debug.Log ("detected person " + rects [i]);
                Imgproc.rectangle (rgbMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0), 2);
            }
            // Debug.Log (locations.ToString ());
            // Debug.Log (weights.ToString ());
        }

        Utils.matToTexture2D (rgbMat, texture);

        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
    }
}
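HOGDescriptor.detectMultiScale also fills the weights Mat with a per-detection SVM confidence score, which the sample ignores. A hedged sketch for dropping weak detections (the 0.5 threshold is an illustrative value, not from the sample):

// Sketch: keep only detections whose SVM weight clears a threshold.
OpenCVForUnity.Rect[] rects = locations.toArray ();
double[] w = weights.toArray ();
for (int i = 0; i < rects.Length; i++) {
    if (i < w.Length && w [i] < 0.5)
        continue; // skip low-confidence detections
    Imgproc.rectangle (rgbMat, rects [i].tl (), rects [i].br (), new Scalar (255, 0, 0), 2);
}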
private IEnumerator init ()
{
    axes.SetActive (false);
    head.SetActive (false);
    rightEye.SetActive (false);
    leftEye.SetActive (false);
    mouth.SetActive (false);

    if (webCamTexture != null) {
        faceTracker.reset ();

        webCamTexture.Stop ();
        initDone = false;

        rgbaMat.Dispose ();
        grayMat.Dispose ();
        cascade.Dispose ();
        camMatrix.Dispose ();
        distCoeffs.Dispose ();
    }

    // Check how many cameras are available on the device, and which ones.
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Start the camera.
    webCamTexture.Play ();

    while (true) {
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1; otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#endif
            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
            if (cascade.empty ()) {
                Debug.LogError ("Cascade file is not loaded. Please copy it from “FaceTrackerSample/StreamingAssets/” to the “Assets/StreamingAssets/” folder.");
            }

            gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);
            gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

            // bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            Camera.main.orthographicSize = webCamTexture.height / 2;

            int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
            camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, max_d);
            camMatrix.put (0, 1, 0);
            camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
            camMatrix.put (1, 0, 0);
            camMatrix.put (1, 1, max_d);
            camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
            camMatrix.put (2, 0, 0);
            camMatrix.put (2, 1, 0);
            camMatrix.put (2, 2, 1.0f);

            Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point ();
            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log ("imageSize " + imageSize.ToString ());
            Debug.Log ("apertureWidth " + apertureWidth);
            Debug.Log ("apertureHeight " + apertureHeight);
            Debug.Log ("fovx " + fovx [0]);
            Debug.Log ("fovy " + fovy [0]);
            Debug.Log ("focalLength " + focalLength [0]);
            Debug.Log ("principalPoint " + principalPoint.ToString ());
            Debug.Log ("aspectratio " + aspectratio [0]);

            ARCamera.fieldOfView = (float)fovy [0];

            Debug.Log ("camMatrix " + camMatrix.dump ());

            distCoeffs = new MatOfDouble (0, 0, 0, 0);
            Debug.Log ("distCoeffs " + distCoeffs.dump ());

            lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
            Debug.Log ("lookAt " + lookAtM.ToString ());

            invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

            initDone = true;

            break;
        } else {
            yield return 0;
        }
    }
}

// Update is called once per frame
void Update ()
{
    if (!initDone)
        return;

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

        // Flip to correct the orientation.
        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                }
            }
        }

        // Convert the image to grayscale.
        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (faceTracker.getPoints ().Count <= 0) {
            Debug.Log ("detectFace");

            // Equalize the histogram and detect faces.
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2,
                    0
                    | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15),
                    new Size ());

                if (faces.rows () > 0) {
                    Debug.Log ("faces " + faces.dump ());

                    // Add initial face points from the MatOfRect.
                    faceTracker.addPoints (faces);

                    // Draw the face rects.
                    OpenCVForUnity.Rect[] rects = faces.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
                        Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                    }
                }
            }
        }

        // Track the face points. If there are no face points, track () always returns false.
        if (faceTracker.track (grayMat, faceTrackerParams)) {
            if (isDrawPoints)
                faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

            Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

            Point[] points = faceTracker.getPoints () [0];

            if (points.Length > 0) {
                // for (int i = 0; i < points.Length; i++) {
                //     Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
                // }

                imagePoints.fromArray (
                    points [31], // left eye
                    points [36], // right eye
                    points [67], // nose
                    points [48], // left mouth corner
                    points [54]  // right mouth corner
                    // ,
                    // points [1],  // left ear
                    // points [13]  // right ear
                );

                Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                bool isRefresh = false;

                if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width)) {
                    isRefresh = true;

                    if (oldRvec == null) {
                        oldRvec = new Mat ();
                        rvec.copyTo (oldRvec);
                    }
                    if (oldTvec == null) {
                        oldTvec = new Mat ();
                        tvec.copyTo (oldTvec);
                    }

                    // Filter rvec noise.
                    using (Mat absDiffRvec = new Mat ()) {
                        Core.absdiff (rvec, oldRvec, absDiffRvec);
                        // Debug.Log ("absDiffRvec " + absDiffRvec.dump ());
                        using (Mat cmpRvec = new Mat ()) {
                            Core.compare (absDiffRvec, new Scalar (rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
                            if (Core.countNonZero (cmpRvec) > 0)
                                isRefresh = false;
                        }
                    }

                    // Filter tvec noise.
                    using (Mat absDiffTvec = new Mat ()) {
                        Core.absdiff (tvec, oldTvec, absDiffTvec);
                        using (Mat cmpTvec = new Mat ()) {
                            Core.compare (absDiffTvec, new Scalar (tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
                            if (Core.countNonZero (cmpTvec) > 0)
                                isRefresh = false;
                        }
                    }
                }

                if (isRefresh) {
                    if (!rightEye.activeSelf)
                        rightEye.SetActive (true);
                    if (!leftEye.activeSelf)
                        leftEye.SetActive (true);

                    if ((Mathf.Abs ((float)(points [48].x - points [56].x)) < Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.2
                        && Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.9)
                        || Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.7) {
                        if (!mouth.activeSelf)
                            mouth.SetActive (true);
                    } else {
                        if (mouth.activeSelf)
                            mouth.SetActive (false);
                    }

                    rvec.copyTo (oldRvec);
                    tvec.copyTo (oldTvec);

                    Calib3d.Rodrigues (rvec, rotM);

                    transformationM.SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
                    transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
                    transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
                    transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));

                    modelViewMtrx = lookAtM * transformationM * invertZM;
                    ARCamera.worldToCameraMatrix = modelViewMtrx;

                    // Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString ());
                }
            }
        }

        Utils.matToTexture2D (rgbaMat, texture, colors);
    }

    if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset ();
        if (oldRvec != null) {
            oldRvec.Dispose ();
            oldRvec = null;
        }
        if (oldTvec != null) {
            oldTvec.Dispose ();
            oldTvec = null;
        }

        ARCamera.ResetWorldToCameraMatrix ();

        rightEye.SetActive (false);
        leftEye.SetActive (false);
        mouth.SetActive (false);
    }
}

void OnDisable ()
{
    webCamTexture.Stop ();
}

private Matrix4x4 getLookAtMatrix (Vector3 pos, Vector3 target, Vector3 up)
{
    Vector3 z = Vector3.Normalize (pos - target);
    Vector3 x = Vector3.Normalize (Vector3.Cross (up, z));
    Vector3 y = Vector3.Normalize (Vector3.Cross (z, x));

    Matrix4x4 result = new Matrix4x4 ();
    result.SetRow (0, new Vector4 (x.x, x.y, x.z, -(Vector3.Dot (pos, x))));
    result.SetRow (1, new Vector4 (y.x, y.y, y.z, -(Vector3.Dot (pos, y))));
    result.SetRow (2, new Vector4 (z.x, z.y, z.z, -(Vector3.Dot (pos, z))));
    result.SetRow (3, new Vector4 (0, 0, 0, 1));

    return result;
}

void OnGUI ()
{
    float screenScale = Screen.height / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical ();
    if (GUILayout.Button ("back")) {
        Application.LoadLevel ("FaceTrackerSample");
    }
    if (GUILayout.Button ("change camera")) {
        isFrontFacing = !isFrontFacing;
        StartCoroutine (init ());
    }
    if (GUILayout.Button ("drawPoints")) {
        isDrawPoints = !isDrawPoints;
    }
    if (GUILayout.Button ("axes")) {
        axes.SetActive (!axes.activeSelf);
    }
    if (GUILayout.Button ("head")) {
        head.SetActive (!head.activeSelf);
    }
    GUILayout.EndVertical ();
}
}
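A note on the pose chain in the Update above: solvePnP returns rvec/tvec in OpenCV's right-handed camera frame, Rodrigues expands rvec into a 3x3 rotation, and the resulting 4x4 [R|t] is adapted to Unity's left-handed convention by multiplying with invertZM (a (1, 1, -1) scale that flips handedness) and the custom lookAtM view matrix. A compact restatement using the snippet's own field names:

// Sketch of the model-view assembly used above:
//   transformationM : [R|t] built from Rodrigues (rvec) and tvec (OpenCV camera space)
//   invertZM        : Matrix4x4.TRS (zero, identity, new Vector3 (1, 1, -1)) -- handedness flip
//   lookAtM         : view matrix from getLookAtMatrix (origin, +z target, -y up)
Matrix4x4 modelView = lookAtM * transformationM * invertZM;
ARCamera.worldToCameraMatrix = modelView; // drives the AR camera instead of moving the head object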
void Update ()
{
    Scene Game = SceneManager.GetActiveScene ();

    if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist (grayMat, grayMat);

        if (cascade != null) {
            cascade.detectMultiScale (grayMat, faces, 1.1, 2, 2,
                new Size (grayMat.cols () * 0.2, grayMat.rows () * 0.2), new Size ());
        }

        grayTest = grayMat;
        if (eyes_cascade != null) {
            eyes_cascade.detectMultiScale (grayTest, eyes, 1.1, 2, 0,
                new Size (grayTest.cols () * 0.13, grayTest.rows () * 0.13), new Size (30, 30));
        }

        OpenCVForUnity.CoreModule.Rect[] rects_y = eyes.toArray ();
        for (int j = 0; j < rects_y.Length; j++) {
            Imgproc.rectangle (rgbaMat, new Point (rects_y [j].x, rects_y [j].y), new Point (rects_y [j].x + rects_y [j].width, rects_y [j].y + rects_y [j].height), new Scalar (255, 255, 0, 0), 2);
        }

        // Display the calibration grid.
        if (Game.name == "Calibrage" && CalibCompt < 4) {
            Imgproc.rectangle (rgbaMat, new Point (0, 0), new Point (640, 480), new Scalar (0, 0, 0, 0), 500);
            Imgproc.line (rgbaMat, new Point (200, 0), new Point (200, 480), new Scalar (255, 0, 0, 255), 1);
            Imgproc.line (rgbaMat, new Point (440, 0), new Point (440, 480), new Scalar (255, 0, 0, 255), 1);
            Imgproc.line (rgbaMat, new Point (200, 150), new Point (440, 150), new Scalar (255, 0, 0, 255), 1);
        } else if (Game.name == "Calibrage" && CalibCompt == 4) {
            Imgproc.rectangle (rgbaMat, new Point (0, 0), new Point (640, 480), new Scalar (0, 0, 0, 0), 500);
            Imgproc.rectangle (rgbaMat, new Point (205, 125), new Point (435, 355), new Scalar (255, 0, 0, 255), 1);
        } else {
            Imgproc.rectangle (rgbaMat, new Point (0, 0), new Point (640, 480), new Scalar (0, 0, 0, 0), 500);
        }

        OpenCVForUnity.CoreModule.Rect[] rects = faces.toArray ();
        for (int i = 0; i < rects.Length; i++) {
            // Run the handler for the scene currently being played.
            if (Game.name == "Jeu") {
                Deplacement (rects [i], rects_y);
            } else if (Game.name == "Calibrage") {
                rgbaMat = CalibFace (rects [i], rgbaMat);
            }
        }

        Utils.fastMatToTexture2D (rgbaMat, texture);
    }
}
// Update is called once per frame
public override void UpdateValue ()
{
    if (cascade == null)
        return;

    if (matSource == null)
        return;

    didUpdateFaceRect = false;

    Mat rgbaMat = matSource.GetMatSource ();
    if (rgbaMat != null) {
        if (debugRawImage != null) {
            if (texture == null || (texture.width != rgbaMat.width () || texture.height != rgbaMat.height ())) {
                texture = new Texture2D (rgbaMat.cols (), rgbaMat.rows (), TextureFormat.RGBA32, false);
            }
            if (colors == null || colors.Length != rgbaMat.width () * rgbaMat.height ()) {
                colors = new Color32[rgbaMat.width () * rgbaMat.height ()];
            }
            debugRawImage.texture = texture;
        }

        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist (grayMat, grayMat);

        if (cascade != null) {
            cascade.detectMultiScale (grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                new Size (grayMat.cols () * 0.2, grayMat.rows () * 0.2), new Size ());
        }

        OpenCVForUnity.Rect[] rects = faces.toArray ();
        for (int i = 0; i < rects.Length; i++) {
            if (i == 0) {
                faceRect = new UnityEngine.Rect (rects [i].x, rects [i].y, rects [i].width, rects [i].height);
                didUpdateFaceRect = true;

                //Debug.Log ("detect faces " + rects [i]);
                if (debugRawImage != null) {
                    Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                }
            }
        }

        //Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

        if (debugRawImage != null) {
            OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture, colors);
        }
    }
}
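The loop above keeps only the first rect that detectMultiScale happens to return, which is not necessarily the most prominent face. A hedged alternative that picks the largest detection by area instead:

// Sketch: track the largest detected face rather than the first one returned.
OpenCVForUnity.Rect[] rects = faces.toArray ();
if (rects.Length > 0) {
    OpenCVForUnity.Rect best = rects [0];
    foreach (var r in rects) {
        if (r.area () > best.area ())
            best = r;
    }
    faceRect = new UnityEngine.Rect (best.x, best.y, best.width, best.height);
    didUpdateFaceRect = true;
}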
// Update is called once per frame
void Update ()
{
    if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

        // Convert the image to grayscale.
        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (faceTracker.getPoints ().Count <= 0) {
            Debug.Log ("detectFace");

            // Equalize the histogram and detect faces.
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2,
                    0
                    | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15),
                    new Size ());

                if (faces.rows () > 0) {
                    Debug.Log ("faces " + faces.dump ());

                    // Add initial face points from the MatOfRect.
                    faceTracker.addPoints (faces);

                    // Draw the face rects.
                    OpenCVForUnity.Rect[] rects = faces.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
#if OPENCV_3
                        Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
#else
                        Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
#endif
                    }
                }
            }
        }

        // Track the face points. If there are no face points, track () always returns false.
        if (faceTracker.track (grayMat, faceTrackerParams)) {
            if (isDrawPoints)
                faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

#if OPENCV_3
            Imgproc.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
#else
            Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);
#endif

            Point[] points = faceTracker.getPoints () [0];

            if (points.Length > 0) {
                // for (int i = 0; i < points.Length; i++) {
                //#if OPENCV_3
                //     Imgproc.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
                //#else
                //     Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
                //#endif
                // }

                imagePoints.fromArray (
                    points [31], // left eye
                    points [36], // right eye
                    points [67], // nose
                    points [48], // left mouth corner
                    points [54]  // right mouth corner
                    // ,
                    // points [1],  // left ear
                    // points [13]  // right ear
                );

                Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                bool isRefresh = false;

                if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)rgbaMat.cols () / (float)webCamTextureToMatHelper.requestWidth)) {
                    isRefresh = true;

                    if (oldRvec == null) {
                        oldRvec = new Mat ();
                        rvec.copyTo (oldRvec);
                    }
                    if (oldTvec == null) {
                        oldTvec = new Mat ();
                        tvec.copyTo (oldTvec);
                    }

                    // Filter rvec noise.
                    using (Mat absDiffRvec = new Mat ()) {
                        Core.absdiff (rvec, oldRvec, absDiffRvec);
                        // Debug.Log ("absDiffRvec " + absDiffRvec.dump ());
                        using (Mat cmpRvec = new Mat ()) {
                            Core.compare (absDiffRvec, new Scalar (rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
                            if (Core.countNonZero (cmpRvec) > 0)
                                isRefresh = false;
                        }
                    }

                    // Filter tvec noise.
                    using (Mat absDiffTvec = new Mat ()) {
                        Core.absdiff (tvec, oldTvec, absDiffTvec);
                        using (Mat cmpTvec = new Mat ()) {
                            Core.compare (absDiffTvec, new Scalar (tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
                            if (Core.countNonZero (cmpTvec) > 0)
                                isRefresh = false;
                        }
                    }
                }

                if (isRefresh) {
                    if (!rightEye.activeSelf)
                        rightEye.SetActive (true);
                    if (!leftEye.activeSelf)
                        leftEye.SetActive (true);

                    if ((Mathf.Abs ((float)(points [48].x - points [56].x)) < Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.2
                        && Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.9)
                        || Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.7) {
                        if (!mouth.activeSelf)
                            mouth.SetActive (true);
                    } else {
                        if (mouth.activeSelf)
                            mouth.SetActive (false);
                    }

                    rvec.copyTo (oldRvec);
                    tvec.copyTo (oldTvec);

                    Calib3d.Rodrigues (rvec, rotM);

                    transformationM.SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
                    transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
                    transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
                    transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));

                    modelViewMtrx = lookAtM * transformationM * invertZM;
                    ARCamera.worldToCameraMatrix = modelViewMtrx;

                    // Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString ());
                }
            }
        }

        // Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

        Utils.matToTexture2D (rgbaMat, texture, colors);
    }

    if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset ();
        if (oldRvec != null) {
            oldRvec.Dispose ();
            oldRvec = null;
        }
        if (oldTvec != null) {
            oldTvec.Dispose ();
            oldTvec = null;
        }

        ARCamera.ResetWorldToCameraMatrix ();

        rightEye.SetActive (false);
        leftEye.SetActive (false);
        mouth.SetActive (false);
    }
}
// Update is called once per frame
void Update ()
{
    if (!initDone)
        return;

    if (screenOrientation != Screen.orientation) {
        screenOrientation = Screen.orientation;
        updateLayout ();
    }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

        if (webCamDevice.isFrontFacing) {
            if (webCamTexture.videoRotationAngle == 0) {
                Core.flip (rgbaMat, rgbaMat, 1);
            } else if (webCamTexture.videoRotationAngle == 90) {
                Core.flip (rgbaMat, rgbaMat, 0);
            }
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip (rgbaMat, rgbaMat, 0);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip (rgbaMat, rgbaMat, 1);
            }
        } else {
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip (rgbaMat, rgbaMat, -1);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip (rgbaMat, rgbaMat, -1);
            }
        }

        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist (grayMat, grayMat);

        if (cascade != null) {
            cascade.detectMultiScale (grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                new Size (webCamTexture.height * 0.2, webCamTexture.height * 0.2), new Size ());
        }

        OpenCVForUnity.Rect[] rects = faces.toArray ();
        for (int i = 0; i < rects.Length; i++) {
            // Debug.Log ("detect faces " + rects [i]);
            Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
        }

        Utils.matToTexture2D (rgbaMat, texture, colors);
    }
}

void OnDisable ()
{
    webCamTexture.Stop ();
}
// Update is called once per frame
void Update ()
{
    if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

        // Convert the image to grayscale.
        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (faceTracker.getPoints ().Count <= 0) {
            Debug.Log ("detectFace");

            // Equalize the histogram and detect faces.
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2,
                    0
                    // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15),
                    new Size ());

                if (faces.rows () > 0) {
                    Debug.Log ("faces " + faces.dump ());

                    // Add initial face points from the MatOfRect.
                    faceTracker.addPoints (faces);

                    // Draw the face rects.
                    OpenCVForUnity.Rect[] rects = faces.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
#if OPENCV_2
                        Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
#else
                        Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
#endif
                    }
                }
            }
        }

        // Track the face points. If there are no face points, track () always returns false.
        if (faceTracker.track (grayMat, faceTrackerParams)) {
            faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));
        }

#if OPENCV_2
        Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);
#else
        Imgproc.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
#endif

        // Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

        Utils.matToTexture2D (rgbaMat, texture, colors);
    }

    if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset ();
    }
}
/// <summary>
/// Process
/// </summary>
/// <returns></returns>
private async void Process ()
{
    float DOWNSCALE_RATIO = 1.0f;

    while (true) {
        // Check for task cancellation.
        if (tokenSource.Token.IsCancellationRequested)
            break;

        rgbaMat = webCamTextureToMatHelper.GetMat ();
        // Debug.Log ("rgbaMat.ToString() " + rgbaMat.ToString ());

        Mat downScaleRgbaMat = null;
        if (enableDownScale) {
            downScaleRgbaMat = imageOptimizationHelper.GetDownScaleMat (rgbaMat);
            DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
        } else {
            downScaleRgbaMat = rgbaMat;
            DOWNSCALE_RATIO = 1.0f;
        }
        Imgproc.cvtColor (downScaleRgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        await Task.Run (() => {
            // Detect faces on the downscaled image.
            if (!enableSkipFrame || !imageOptimizationHelper.IsCurrentFrameSkipped ()) {
                using (Mat equalizeHistMat = new Mat ())
                using (MatOfRect faces = new MatOfRect ()) {
                    Imgproc.equalizeHist (grayMat, equalizeHistMat);

                    cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE,
                        new Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                    //Debug.Log ("faces.dump() " + faces.dump ());
                    detectResult = faces.toArray ();
                }

                if (enableDownScale) {
                    // Scale the detections back up to the original image size.
                    for (int i = 0; i < detectResult.Length; ++i) {
                        var rect = detectResult [i];
                        detectResult [i] = new OpenCVForUnity.CoreModule.Rect (
                            (int)(rect.x * DOWNSCALE_RATIO),
                            (int)(rect.y * DOWNSCALE_RATIO),
                            (int)(rect.width * DOWNSCALE_RATIO),
                            (int)(rect.height * DOWNSCALE_RATIO));
                    }
                }

                //Imgproc.rectangle (rgbaMat, new Point (0, 0), new Point (rgbaMat.width (), rgbaMat.height ()), new Scalar (0, 0, 0, 0), -1);
                for (int i = 0; i < detectResult.Length; i++) {
                    Imgproc.rectangle (rgbaMat, new Point (detectResult [i].x, detectResult [i].y), new Point (detectResult [i].x + detectResult [i].width, detectResult [i].y + detectResult [i].height), new Scalar (255, 0, 0, 255), 4);
                }
            }
        });

        Utils.fastMatToTexture2D (rgbaMat, texture);

        Thread.Sleep (10);
    }
}
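The loop above exits when tokenSource.Token.IsCancellationRequested becomes true. A minimal sketch of the surrounding lifecycle, assuming tokenSource is a CancellationTokenSource field on the same component:

using System.Threading;

CancellationTokenSource tokenSource;

void OnEnable ()
{
    tokenSource = new CancellationTokenSource ();
    Process (); // fire-and-forget async loop that polls tokenSource.Token
}

void OnDestroy ()
{
    tokenSource.Cancel ();  // flips IsCancellationRequested, ending the loop
    tokenSource.Dispose ();
}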
public void OnFrameMatAcquired (Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
{
    Mat downScaleFrameMat = bgraMat;
    if (imageOptimizationHelper.downscaleRatio != 1) {
        downScaleFrameMat = new Mat ();
        Imgproc.resize (bgraMat, downScaleFrameMat, new Size (), 1.0 / imageOptimizationHelper.downscaleRatio, 1.0 / imageOptimizationHelper.downscaleRatio, Imgproc.INTER_LINEAR);
    }

    Imgproc.cvtColor (downScaleFrameMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
    Imgproc.equalizeHist (grayMat, grayMat);

    if (enableDetection && !isDetecting) {
        isDetecting = true;

        grayMat.copyTo (grayMat4Thread);

        System.Threading.Tasks.Task.Run (() => {
            isThreadRunning = true;
            DetectObject ();
            isThreadRunning = false;
            OnDetectionDone ();
        });
    }

    OpenCVForUnityUtils.SetImage (faceLandmarkDetector, grayMat);

    if (!displayCameraImage) {
        // Fill all black.
        Imgproc.rectangle (downScaleFrameMat, new Point (0, 0), new Point (downScaleFrameMat.width (), downScaleFrameMat.height ()), new Scalar (0, 0, 0, 0), -1);
    }

    Rect[] rects;
    if (!useSeparateDetection) {
        if (hasUpdatedDetectionResult) {
            hasUpdatedDetectionResult = false;

            lock (rectangleTracker) {
                rectangleTracker.UpdateTrackedObjects (detectionResult.toList ());
            }
        }

        lock (rectangleTracker) {
            rectangleTracker.GetObjects (resultObjects, true);
        }
        rects = resultObjects.ToArray ();

        if (displayDetectedFaceRect) {
            for (int i = 0; i < rects.Length; i++) {
                //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                //    Debug.Log ("detected face[" + i + "] " + rects [i]);
                //}, true);
                Imgproc.rectangle (downScaleFrameMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 0, 255, 255), 3);
            }
        }

        // Adjust to Dlib's result.
        foreach (OpenCVForUnity.Rect r in resultObjects) {
            r.y += (int)(r.height * 0.1f);
        }

        foreach (var rect in resultObjects) {
            // Detect landmark points.
            List<Vector2> points = faceLandmarkDetector.DetectLandmark (new UnityEngine.Rect (rect.x, rect.y, rect.width, rect.height));

            // Draw landmark points.
            OpenCVForUnityUtils.DrawFaceLandmark (downScaleFrameMat, points, new Scalar (0, 255, 0, 255), 2);

            if (displayDetectedFaceRect) {
                // Draw the face rect.
                OpenCVForUnityUtils.DrawFaceRect (downScaleFrameMat, new UnityEngine.Rect (rect.x, rect.y, rect.width, rect.height), new Scalar (0, 0, 255, 255), 2);
            }
        }
    } else {
        if (hasUpdatedDetectionResult) {
            hasUpdatedDetectionResult = false;

            //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
            //    Debug.Log ("process: rectsWhereRegions were taken from detectionResult");
            //}, true);

            lock (rectangleTracker) {
                rectsWhereRegions = detectionResult.toArray ();
            }

            if (displayDetectedFaceRect) {
                rects = rectsWhereRegions;
                for (int i = 0; i < rects.Length; i++) {
                    Imgproc.rectangle (downScaleFrameMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 1);
                }
            }
        } else {
            //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
            //    Debug.Log ("process: rectsWhereRegions were taken from previous positions");
            //}, true);

            lock (rectangleTracker) {
                rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects ();
            }

            if (displayDetectedFaceRect) {
                rects = rectsWhereRegions;
                for (int i = 0; i < rects.Length; i++) {
                    Imgproc.rectangle (downScaleFrameMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 255, 0, 255), 1);
                }
            }
        }

        detectedObjectsInRegions.Clear ();
        if (rectsWhereRegions.Length > 0) {
            int len = rectsWhereRegions.Length;
            for (int i = 0; i < len; i++) {
                DetectInRegion (grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
            }
        }

        lock (rectangleTracker) {
            rectangleTracker.UpdateTrackedObjects (detectedObjectsInRegions);
            rectangleTracker.GetObjects (resultObjects, true);
        }

        if (displayDetectedFaceRect) {
            rects = resultObjects.ToArray ();
            for (int i = 0; i < rects.Length; i++) {
                //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                //    Debug.Log ("detected face[" + i + "] " + rects [i]);
                //}, true);
                Imgproc.rectangle (downScaleFrameMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 0, 255, 255), 3);
            }
        }

        // Adjust to Dlib's result.
        foreach (OpenCVForUnity.Rect r in resultObjects) {
            r.y += (int)(r.height * 0.1f);
        }

        foreach (var rect in resultObjects) {
            // Detect landmark points.
            List<Vector2> points = faceLandmarkDetector.DetectLandmark (new UnityEngine.Rect (rect.x, rect.y, rect.width, rect.height));

            // Draw landmark points.
            OpenCVForUnityUtils.DrawFaceLandmark (downScaleFrameMat, points, new Scalar (0, 255, 0, 255), 2);

            if (displayDetectedFaceRect) {
                // Draw the face rect.
                OpenCVForUnityUtils.DrawFaceRect (downScaleFrameMat, new UnityEngine.Rect (rect.x, rect.y, rect.width, rect.height), new Scalar (0, 0, 255, 255), 2);
            }
        }
    }

    UnityEngine.WSA.Application.InvokeOnAppThread (() => {
        if (!webCamTextureToMatHelper.IsPlaying ()) {
            return;
        }

        OpenCVForUnity.Utils.fastMatToTexture2D (downScaleFrameMat, texture);

        bgraMat.Dispose ();
        if (imageOptimizationHelper.downscaleRatio != 1) {
            downScaleFrameMat.Dispose ();
        }

        Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

        quad_renderer.sharedMaterial.SetMatrix ("_WorldToCameraMatrix", worldToCameraMatrix);

        // Position the canvas object slightly in front of the real-world web camera.
        Vector3 position = cameraToWorldMatrix.GetColumn (3) - cameraToWorldMatrix.GetColumn (2);

        // Rotate the canvas object so that it faces the user.
        Quaternion rotation = Quaternion.LookRotation (-cameraToWorldMatrix.GetColumn (2), cameraToWorldMatrix.GetColumn (1));

        gameObject.transform.position = position;
        gameObject.transform.rotation = rotation;
    }, false);
}
// Update is called once per frame
void Update ()
{
    if (!initDone)
        return;

    if (screenOrientation != Screen.orientation) {
        screenOrientation = Screen.orientation;
        updateLayout ();
    }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

        // Flip to correct the orientation.
        if (webCamDevice.isFrontFacing) {
            if (webCamTexture.videoRotationAngle == 0) {
                Core.flip (rgbaMat, rgbaMat, 1);
            } else if (webCamTexture.videoRotationAngle == 90) {
                Core.flip (rgbaMat, rgbaMat, 0);
            }
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip (rgbaMat, rgbaMat, 0);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip (rgbaMat, rgbaMat, 1);
            }
        } else {
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip (rgbaMat, rgbaMat, -1);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip (rgbaMat, rgbaMat, -1);
            }
        }

        // Convert the image to grayscale.
        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (faceTracker.getPoints ().Count <= 0) {
            Debug.Log ("detectFace");

            // Equalize the histogram and detect faces.
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2,
                    0
                    | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15),
                    new Size ());

                if (faces.rows () > 0) {
                    Debug.Log ("--------------------------faces");
                    Debug.Log ("faces " + faces.dump ());
                    Debug.Log ("--------------------------faces");

                    // Add initial face points from the MatOfRect.
                    faceTracker.addPoints (faces);

                    // Draw the face rects.
                    OpenCVForUnity.Rect[] rects = faces.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
                        Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                    }
                }
            }
        }

        // Track the face points. If there are no face points, track () always returns false.
        if (faceTracker.track (grayMat, faceTrackerParams)) {
            if (isDrawPoints) {
                //Debug.Log ("--------------------------100-----------------------------------");
                faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));
            }
            //Debug.Log ("--------------------------1000");
            //Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

            Point[] points = faceTracker.getPoints () [0];

            //eyeWidth = points [36] - points [31];
            /*
            Debug.Log ("--------------------------0");
            Debug.Log (points [31]);
            Debug.Log ("--------------------------2");
            Debug.Log (points [36]);
            Debug.Log ("--------------------------1");
            */

            if (points.Length > 0) {
                imagePoints.fromArray (
                    points [31], // left eye
                    points [36], // right eye
                    points [67], // nose
                    points [48], // left mouth corner
                    points [54]  // right mouth corner
                );

                Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                bool isRefresh = false;

                if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width)) {
                    isRefresh = true;

                    if (oldRvec == null) {
                        oldRvec = new Mat ();
                        rvec.copyTo (oldRvec);
                    }
                    if (oldTvec == null) {
                        oldTvec = new Mat ();
                        tvec.copyTo (oldTvec);
                    }

                    // Filter rvec noise.
                    using (Mat absDiffRvec = new Mat ()) {
                        Core.absdiff (rvec, oldRvec, absDiffRvec);
                        using (Mat cmpRvec = new Mat ()) {
                            Core.compare (absDiffRvec, new Scalar (rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
                            if (Core.countNonZero (cmpRvec) > 0)
                                isRefresh = false;
                        }
                    }

                    // Filter tvec noise.
                    using (Mat absDiffTvec = new Mat ()) {
                        Core.absdiff (tvec, oldTvec, absDiffTvec);
                        using (Mat cmpTvec = new Mat ()) {
                            Core.compare (absDiffTvec, new Scalar (tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
                            if (Core.countNonZero (cmpTvec) > 0)
                                isRefresh = false;
                        }
                    }
                }

                if (isRefresh) {
                    if (!rightEye.activeSelf)
                        rightEye.SetActive (true);
                    if (!leftEye.activeSelf)
                        leftEye.SetActive (true);

                    rvec.copyTo (oldRvec);
                    tvec.copyTo (oldTvec);

                    Calib3d.Rodrigues (rvec, rotM);

                    transformationM.SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
                    transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
                    transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
                    transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));

                    modelViewMtrx = lookAtM * transformationM * invertZM;
                    ARCamera.worldToCameraMatrix = modelViewMtrx;
                }
            }
        }

        Utils.matToTexture2D (rgbaMat, texture, colors);
    }

    if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset ();
        if (oldRvec != null) {
            oldRvec.Dispose ();
            oldRvec = null;
        }
        if (oldTvec != null) {
            oldTvec.Dispose ();
            oldTvec = null;
        }

        ARCamera.ResetWorldToCameraMatrix ();

        rightEye.SetActive (false);
        leftEye.SetActive (false);
    }
}

void OnDisable ()
{
    webCamTexture.Stop ();
}
// Update is called once per frame
void Update ()
{
    lock (sync) {
        while (ExecuteOnMainThread.Count > 0) {
            ExecuteOnMainThread.Dequeue ().Invoke ();
        }
    }

    if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat (webCamTextureToMatHelper.GetMat ());

        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist (grayMat, grayMat);

        if (enable && !detecting) {
            detecting = true;

            grayMat.copyTo (grayMat4Thread);

            StartThread (ThreadWorker);
        }

        OpenCVForUnityUtils.SetImage (faceLandmarkDetector, grayMat);

        Rect[] rects;
        if (!isUsingSeparateDetection) {
            if (didUpdateTheDetectionResult) {
                didUpdateTheDetectionResult = false;

                rectangleTracker.UpdateTrackedObjects (detectionResult.toList ());
            }

            rectangleTracker.GetObjects (resultObjects, true);
            rects = rectangleTracker.CreateCorrectionBySpeedOfRects ();

            if (rects.Length > 0) {
                OpenCVForUnity.Rect rect = rects [0];

                // Adjust to Dlib's result.
                rect.y += (int)(rect.height * 0.1f);

                // Detect landmark points.
                List<Vector2> points = faceLandmarkDetector.DetectLandmark (new UnityEngine.Rect (rect.x, rect.y, rect.width, rect.height));

                UpdateARHeadTransform (points);
            }
        } else {
            if (didUpdateTheDetectionResult) {
                didUpdateTheDetectionResult = false;

                //Debug.Log ("process: rectsWhereRegions were taken from detectionResult");
                rectsWhereRegions = detectionResult.toArray ();
            } else {
                //Debug.Log ("process: rectsWhereRegions were taken from previous positions");
                rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects ();
            }

            detectedObjectsInRegions.Clear ();
            if (rectsWhereRegions.Length > 0) {
                int len = rectsWhereRegions.Length;
                for (int i = 0; i < len; i++) {
                    detectInRegion (grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                }
            }

            rectangleTracker.UpdateTrackedObjects (detectedObjectsInRegions);
            rectangleTracker.GetObjects (resultObjects, true);

            if (resultObjects.Count > 0) {
                OpenCVForUnity.Rect rect = resultObjects [0];

                // Adjust to Dlib's result.
                rect.y += (int)(rect.height * 0.1f);

                // Detect landmark points.
                List<Vector2> points = faceLandmarkDetector.DetectLandmark (new UnityEngine.Rect (rect.x, rect.y, rect.width, rect.height));

                UpdateARHeadTransform (points);
            }
        }
    }
}
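Both branches above shift the tracked rect down by 10% of its height before landmark detection: Haar cascades return a box centered somewhat higher on the face than the boxes dlib's landmark model expects, and the 0.1 factor is the sample's empirical correction. The same adjustment expressed as a small helper (the helper itself is a sketch, not from the sample):

// Sketch: helper form of the Haar -> dlib box adjustment used above.
static UnityEngine.Rect ToDlibStyleRect (OpenCVForUnity.Rect r)
{
    // Nudge the rect downward by 10% of its height so it resembles
    // the face boxes dlib's landmark model was trained on.
    return new UnityEngine.Rect (r.x, r.y + (int)(r.height * 0.1f), r.width, r.height);
}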
private IEnumerator init ()
{
    if (webCamTexture != null) {
        webCamTexture.Stop ();
        initDone = false;

        rgbaMat.Dispose ();
        grayMat.Dispose ();
    }

    // Check how many cameras are available on the device, and which ones.
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Start the camera.
    webCamTexture.Play ();

    while (true) {
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1; otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IPHONE && !UNITY_EDITOR
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#endif
            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
#if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
            gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
#endif
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

            gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            // bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            // if (webCamTexture.videoRotationAngle == 270)
            //     scaleY = -1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
            faces = new MatOfRect ();

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

#if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
            Camera.main.orthographicSize = webCamTexture.width / 2;
#else
            Camera.main.orthographicSize = webCamTexture.height / 2;
#endif

            initDone = true;

            break;
        } else {
            yield return 0;
        }
    }
}

// Update is called once per frame
void Update ()
{
    if (!initDone)
        return;

#if UNITY_IPHONE && !UNITY_EDITOR
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 90) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                } else if (webCamTexture.videoRotationAngle == 270) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 90) {
                } else if (webCamTexture.videoRotationAngle == 270) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 90) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                } else if (webCamTexture.videoRotationAngle == 270) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 90) {
                } else if (webCamTexture.videoRotationAngle == 270) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                }
            }
        }

        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist (grayMat, grayMat);

        if (cascade != null) {
            cascade.detectMultiScale (grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                new Size (webCamTexture.width * 0.15, webCamTexture.width * 0.15), new Size ());
        }

        OpenCVForUnity.Rect[] rects = faces.toArray ();
        for (int i = 0; i < rects.Length; i++) {
            // Debug.Log ("detect faces " + rects [i]);
            Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
        }

        Utils.matToTexture2D (rgbaMat, texture, colors);
    }
}

void OnDisable ()
{
    webCamTexture.Stop ();
}

void OnGUI ()
{
    float screenScale = Screen.width / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical ();
    if (GUILayout.Button ("back")) {
        Application.LoadLevel ("OpenCVForUnitySample");
    }
    if (GUILayout.Button ("change camera")) {
        isFrontFacing = !isFrontFacing;
        StartCoroutine (init ());
    }
    GUILayout.EndVertical ();
}
}
}
public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
{
    Imgproc.cvtColor(bgraMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
    Imgproc.equalizeHist(grayMat, grayMat);

    if (enableDetection && !isDetecting) {
        isDetecting = true;

        grayMat.copyTo(grayMat4Thread);

        System.Threading.Tasks.Task.Run(() => {
            isThreadRunning = true;
            DetectObject();
            isThreadRunning = false;
            OnDetectionDone();
        });
    }

    Rect[] rects;
    if (!useSeparateDetection) {
        if (hasUpdatedDetectionResult) {
            hasUpdatedDetectionResult = false;
            lock (rectangleTracker) {
                rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
            }
        }

        lock (rectangleTracker) {
            rectangleTracker.GetObjects(resultObjects, true);
        }
        rects = resultObjects.ToArray();
    } else {
        if (hasUpdatedDetectionResult) {
            hasUpdatedDetectionResult = false;

            //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
            //    Debug.Log("process: get rectsWhereRegions from detectionResult");
            //}, true);

            lock (rectangleTracker) {
                rectsWhereRegions = detectionResult.toArray();
            }
            rects = rectsWhereRegions;
        } else {
            //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
            //    Debug.Log("process: get rectsWhereRegions from previous positions");
            //}, true);

            lock (rectangleTracker) {
                rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
            }
            rects = rectsWhereRegions;
        }

        detectedObjectsInRegions.Clear();
        if (rectsWhereRegions.Length > 0) {
            int len = rectsWhereRegions.Length;
            for (int i = 0; i < len; i++) {
                DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
            }
        }

        lock (rectangleTracker) {
            rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
            rectangleTracker.GetObjects(resultObjects, true);
        }
        rects = resultObjects.ToArray();
    }

    UnityEngine.WSA.Application.InvokeOnAppThread(() => {
        if (!webCamTextureToMatHelper.IsPlaying()) {
            return;
        }

        DrawRects(rects, bgraMat.width(), bgraMat.height());
        bgraMat.Dispose();

        Vector3 ccCameraSpacePos = UnProjectVector(projectionMatrix, new Vector3(0.0f, 0.0f, overlayDistance));
        Vector3 tlCameraSpacePos = UnProjectVector(projectionMatrix, new Vector3(-overlayDistance, overlayDistance, overlayDistance));

        // position
        Vector3 position = cameraToWorldMatrix.MultiplyPoint3x4(ccCameraSpacePos);
        gameObject.transform.position = position;

        // scale
        Vector3 scale = new Vector3(Mathf.Abs(tlCameraSpacePos.x - ccCameraSpacePos.x) * 2, Mathf.Abs(tlCameraSpacePos.y - ccCameraSpacePos.y) * 2, 1);
        gameObject.transform.localScale = scale;

        // Rotate the canvas object so that it faces the user.
        Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));
        gameObject.transform.rotation = rotation;

        rectOverlay.UpdateOverlayTransform(gameObject.transform);
    }, false);
}
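Every rectangleTracker access above is wrapped in lock (rectangleTracker) because the tracker is shared between the video-frame callback thread and the detection Task. A minimal sketch of the same discipline, reusing the sample's fields; the method name is illustrative.

// Sketch: consume a finished detection and read back tracked rects under one
// lock (the sample locks the tracker instance itself).
Rect[] ConsumeDetectionResult(MatOfRect detectionResult)
{
    lock (rectangleTracker) {
        rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
        rectangleTracker.GetObjects(resultObjects, true);
        return resultObjects.ToArray();
    }
}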
private IEnumerator init()
{
    axes.SetActive(false);
    head.SetActive(false);
    rightEye.SetActive(false);
    leftEye.SetActive(false);
    mouth.SetActive(false);

    if (webCamTexture != null) {
        faceTracker.reset();

        webCamTexture.Stop();
        initDone = false;

        rgbaMat.Dispose();
        grayMat.Dispose();
        cascade.Dispose();
        camMatrix.Dispose();
        distCoeffs.Dispose();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
    }

    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play();

    while (true) {
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
        #else
        if (webCamTexture.didUpdateThisFrame) {
        #endif

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];

            rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

            texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
            if (cascade.empty()) {
                Debug.LogError("cascade file is not loaded. Please copy it from “FaceTrackerSample/StreamingAssets/” to the “Assets/StreamingAssets/” folder.");
            }

            gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);

            gameObject.transform.localEulerAngles = new Vector3(0, 0, 0);
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

            // bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent<Renderer>().material.mainTexture = texture;

            Camera.main.orthographicSize = webCamTexture.height / 2;

            int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols());
            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, max_d);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, rgbaMat.cols() / 2.0f);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, max_d);
            camMatrix.put(1, 2, rgbaMat.rows() / 2.0f);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);

            Size imageSize = new Size(rgbaMat.cols(), rgbaMat.rows());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point();
            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);

            ARCamera.fieldOfView = (float)fovy [0];

            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
            Debug.Log("lookAt " + lookAtM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

            initDone = true;

            break;
        } else {
            yield return 0;
        }
    }
}

// Update is called once per frame
void Update()
{
    if (!initDone) {
        return;
    }

    #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
    #else
    if (webCamTexture.didUpdateThisFrame) {
    #endif

        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

        // flip to correct direction.
        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip(rgbaMat, rgbaMat, -1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip(rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
            }
        }

        // convert image to greyscale
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (faceTracker.getPoints().Count <= 0) {
            Debug.Log("detectFace");

            // equalize the greyscale image before detection
            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect()) {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2,
                    0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                if (faces.rows() > 0) {
                    Debug.Log("faces " + faces.dump());

                    // add initial face points from MatOfRect
                    faceTracker.addPoints(faces);

                    // draw face rect
                    OpenCVForUnity.Rect[] rects = faces.toArray();
                    for (int i = 0; i < rects.Length; i++) {
                        Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                    }
                }
            }
        }

        // track face points. If face points <= 0, track() always returns false.
        if (faceTracker.track(grayMat, faceTrackerParams)) {
            if (isDrawPoints) {
                faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
            }

            Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

            Point[] points = faceTracker.getPoints() [0];

            if (points.Length > 0) {
                // for (int i = 0; i < points.Length; i++) {
                //     Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
                // }

                imagePoints.fromArray(
                    points [31], // l eye
                    points [36], // r eye
                    points [67], // nose
                    points [48], // l mouth
                    points [54]  // r mouth
                    // ,
                    // points [1],  // l ear
                    // points [13]  // r ear
                );

                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                bool isRefresh = false;

                if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width)) {
                    isRefresh = true;

                    if (oldRvec == null) {
                        oldRvec = new Mat();
                        rvec.copyTo(oldRvec);
                    }
                    if (oldTvec == null) {
                        oldTvec = new Mat();
                        tvec.copyTo(oldTvec);
                    }

                    // filter Rvec noise.
                    using (Mat absDiffRvec = new Mat()) {
                        Core.absdiff(rvec, oldRvec, absDiffRvec);
                        // Debug.Log ("absDiffRvec " + absDiffRvec.dump());
                        using (Mat cmpRvec = new Mat()) {
                            Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
                            if (Core.countNonZero(cmpRvec) > 0) {
                                isRefresh = false;
                            }
                        }
                    }

                    // filter Tvec noise.
                    using (Mat absDiffTvec = new Mat()) {
                        Core.absdiff(tvec, oldTvec, absDiffTvec);
                        using (Mat cmpTvec = new Mat()) {
                            Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
                            if (Core.countNonZero(cmpTvec) > 0) {
                                isRefresh = false;
                            }
                        }
                    }
                }

                if (isRefresh) {
                    if (!rightEye.activeSelf) {
                        rightEye.SetActive(true);
                    }
                    if (!leftEye.activeSelf) {
                        leftEye.SetActive(true);
                    }

                    if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2
                        && Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9)
                        || Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7) {
                        if (!mouth.activeSelf) {
                            mouth.SetActive(true);
                        }
                    } else {
                        if (mouth.activeSelf) {
                            mouth.SetActive(false);
                        }
                    }

                    rvec.copyTo(oldRvec);
                    tvec.copyTo(oldTvec);

                    Calib3d.Rodrigues(rvec, rotM);

                    transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                    transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                    transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                    modelViewMtrx = lookAtM * transformationM * invertZM;
                    ARCamera.worldToCameraMatrix = modelViewMtrx;

                    // Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString());
                }
            }
        }

        Utils.matToTexture2D(rgbaMat, texture, colors);
    }

    if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset();

        if (oldRvec != null) {
            oldRvec.Dispose();
            oldRvec = null;
        }
        if (oldTvec != null) {
            oldTvec.Dispose();
            oldTvec = null;
        }

        ARCamera.ResetWorldToCameraMatrix();

        rightEye.SetActive(false);
        leftEye.SetActive(false);
        mouth.SetActive(false);
    }
}

void OnDisable()
{
    webCamTexture.Stop();
}

private Matrix4x4 getLookAtMatrix(Vector3 pos, Vector3 target, Vector3 up)
{
    Vector3 z = Vector3.Normalize(pos - target);
    Vector3 x = Vector3.Normalize(Vector3.Cross(up, z));
    Vector3 y = Vector3.Normalize(Vector3.Cross(z, x));

    Matrix4x4 result = new Matrix4x4();
    result.SetRow(0, new Vector4(x.x, x.y, x.z, -(Vector3.Dot(pos, x))));
    result.SetRow(1, new Vector4(y.x, y.y, y.z, -(Vector3.Dot(pos, y))));
    result.SetRow(2, new Vector4(z.x, z.y, z.z, -(Vector3.Dot(pos, z))));
    result.SetRow(3, new Vector4(0, 0, 0, 1));

    return result;
}

void OnGUI()
{
    float screenScale = Screen.height / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical();

    if (GUILayout.Button("back")) {
        Application.LoadLevel("FaceTrackerSample");
    }
    if (GUILayout.Button("change camera")) {
        isFrontFacing = !isFrontFacing;
        StartCoroutine(init());
    }
    if (GUILayout.Button("drawPoints")) {
        isDrawPoints = !isDrawPoints;
    }
    if (GUILayout.Button("axes")) {
        axes.SetActive(!axes.activeSelf);
    }
    if (GUILayout.Button("head")) {
        head.SetActive(!head.activeSelf);
    }

    GUILayout.EndVertical();
}
}
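The camMatrix built in init() above is an approximate pinhole model: focal length max(width, height) in pixels, principal point at the image center. The vertical field of view that Calib3d.calibrationMatrixValues reports, and that is assigned to ARCamera.fieldOfView, follows from fovy = 2 * atan(h / (2f)). A standalone sketch of that arithmetic, assuming only UnityEngine:

// Sketch: vertical FOV implied by the sample's approximate intrinsics.
using UnityEngine;

public static class IntrinsicsSketch
{
    public static float VerticalFovDegrees(int imageWidth, int imageHeight)
    {
        float f = Mathf.Max(imageWidth, imageHeight);   // assumed focal length in pixels
        // fovy = 2 * atan((h / 2) / f), converted to degrees for Camera.fieldOfView
        return 2f * Mathf.Atan(imageHeight / (2f * f)) * Mathf.Rad2Deg;
    }
}
// e.g. a 640x480 frame gives fovy of roughly 41.1 degrees, matching what
// calibrationMatrixValues logs for the same camMatrix.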
private void detectInRegion (Mat img, Rect r, List<Rect> detectedObjectsInRegions)
{
    Rect r0 = new Rect (new Point (), img.size ());
    Rect r1 = new Rect (r.x, r.y, r.width, r.height);
    Inflate (r1, (int)((r1.width * innerParameters.coeffTrackingWindowSize) - r1.width) / 2,
        (int)((r1.height * innerParameters.coeffTrackingWindowSize) - r1.height) / 2);
    r1 = Intersect (r0, r1);

    if ((r1.width <= 0) || (r1.height <= 0)) {
        Debug.Log ("DetectionBasedTracker::detectInRegion: Empty intersection");
        return;
    }

    int d = Math.Min (r.width, r.height);
    d = (int)Math.Round (d * innerParameters.coeffObjectSizeToTrack);

    MatOfRect tmpobjects = new MatOfRect ();

    Mat img1 = new Mat (img, r1); // subimage for rectangle -- without data copying

    regionCascade.detectMultiScale (img1, tmpobjects, parameters.scaleFactor, parameters.minNeighbors,
        0 | Objdetect.CASCADE_DO_CANNY_PRUNING | Objdetect.CASCADE_SCALE_IMAGE | Objdetect.CASCADE_FIND_BIGGEST_OBJECT,
        new Size (d, d), new Size ());

    Rect[] tmpobjectsArray = tmpobjects.toArray ();
    int len = tmpobjectsArray.Length;
    for (int i = 0; i < len; i++) {
        Rect tmp = tmpobjectsArray [i];
        Rect curres = new Rect (new Point (tmp.x + r1.x, tmp.y + r1.y), tmp.size ());
        detectedObjectsInRegions.Add (curres);
    }
}
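detectInRegion narrows the cascade to a window around the last known face: grow the rect by coeffTrackingWindowSize, clip it to the image, and derive the minimum object size from coeffObjectSizeToTrack. Inflate and Intersect are helpers of the sample; the sketch below inlines equivalent arithmetic so it stands alone.

// Sketch of the tracking-window math used by detectInRegion.
using OpenCVForUnity;

public static class RegionSketch
{
    public static Rect TrackingWindow(Rect lastFace, Size imageSize, float coeff)
    {
        Rect r = lastFace.clone();
        int dx = (int)((r.width * coeff) - r.width) / 2;    // horizontal growth
        int dy = (int)((r.height * coeff) - r.height) / 2;  // vertical growth
        r.x -= dx; r.y -= dy;
        r.width += 2 * dx; r.height += 2 * dy;

        // clip to the image bounds; a non-positive size means the face left the frame
        int x2 = System.Math.Min(r.x + r.width, (int)imageSize.width);
        int y2 = System.Math.Min(r.y + r.height, (int)imageSize.height);
        r.x = System.Math.Max(r.x, 0);
        r.y = System.Math.Max(r.y, 0);
        r.width = x2 - r.x;
        r.height = y2 - r.y;
        return r;
    }
}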
// Update is called once per frame
void Update()
{
    if (!initDone) {
        return;
    }

    if (screenOrientation != Screen.orientation) {
        screenOrientation = Screen.orientation;
        updateLayout();
    }

    #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
    #else
    if (webCamTexture.didUpdateThisFrame) {
    #endif

        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

        if (webCamDevice.isFrontFacing) {
            if (webCamTexture.videoRotationAngle == 0) {
                Core.flip(rgbaMat, rgbaMat, 1);
            } else if (webCamTexture.videoRotationAngle == 90) {
                Core.flip(rgbaMat, rgbaMat, 0);
            }
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip(rgbaMat, rgbaMat, 0);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip(rgbaMat, rgbaMat, 1);
            }
        } else {
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip(rgbaMat, rgbaMat, -1);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip(rgbaMat, rgbaMat, -1);
            }
        }

        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist(grayMat, grayMat);

        if (cascade != null) {
            cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                new Size(webCamTexture.height * 0.2, webCamTexture.height * 0.2), new Size());
        }

        OpenCVForUnity.Rect[] rects = faces.toArray();
        for (int i = 0; i < rects.Length; i++) {
            // Debug.Log ("detect faces " + rects [i]);
            Imgproc.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
        }

        Utils.matToTexture2D(rgbaMat, texture, colors);
    }
}

void OnDisable()
{
    webCamTexture.Stop();
}

void OnGUI()
{
    float screenScale = Screen.height / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical();
    if (GUILayout.Button("back")) {
        Application.LoadLevel("OpenCVForUnitySample");
    }
    if (GUILayout.Button("change camera")) {
        shouldUseFrontFacing = !shouldUseFrontFacing;
        StartCoroutine(init());
    }
    GUILayout.EndVertical();
}
}
public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
{
    Mat downScaleFrameMat = imageOptimizationHelper.GetDownScaleMat(bgraMat);

    Imgproc.cvtColor(downScaleFrameMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
    Imgproc.equalizeHist(grayMat, grayMat);

    if (enableDetection && !isDetecting) {
        isDetecting = true;

        grayMat.copyTo(grayMat4Thread);

        System.Threading.Tasks.Task.Run(() => {
            isThreadRunning = true;
            DetectObject();
            isThreadRunning = false;
            OnDetectionDone();
        });
    }

    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);

    Mat bgraMat4preview = null;
    if (displayCameraPreview) {
        bgraMat4preview = new Mat();
        downScaleFrameMat.copyTo(bgraMat4preview);
    }

    List<Vector2> points = null;
    Rect[] rects;
    if (!useSeparateDetection) {
        if (hasUpdatedDetectionResult) {
            hasUpdatedDetectionResult = false;
            lock (rectangleTracker) {
                rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
            }
        }

        lock (rectangleTracker) {
            rectangleTracker.GetObjects(resultObjects, true);
        }
        rects = resultObjects.ToArray();

        if (rects.Length > 0) {
            OpenCVForUnity.Rect rect = rects [0];

            // correct the deviation between OpenCV's face rectangle and Dlib's.
            rect.y += (int)(rect.height * 0.1f);

            // detect landmark points
            points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

            if (enableOpticalFlowFilter) {
                opticalFlowFilter.Process(bgraMat, points, points, false);
            }

            if (displayCameraPreview && bgraMat4preview != null) {
                // draw landmark points
                OpenCVForUnityUtils.DrawFaceLandmark(bgraMat4preview, points, new Scalar(0, 255, 0, 255), 2);
            }
        }
    } else {
        if (hasUpdatedDetectionResult) {
            hasUpdatedDetectionResult = false;

            //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
            //    Debug.Log("process: get rectsWhereRegions from detectionResult");
            //}, true);

            lock (rectangleTracker) {
                rectsWhereRegions = detectionResult.toArray();
            }
        } else {
            //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
            //    Debug.Log("process: get rectsWhereRegions from previous positions");
            //}, true);

            lock (rectangleTracker) {
                rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
            }
        }

        detectedObjectsInRegions.Clear();
        if (rectsWhereRegions.Length > 0) {
            int len = rectsWhereRegions.Length;
            for (int i = 0; i < len; i++) {
                DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
            }
        }

        lock (rectangleTracker) {
            rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
            rectangleTracker.GetObjects(resultObjects, true);
        }

        if (resultObjects.Count > 0) {
            OpenCVForUnity.Rect rect = resultObjects [0];

            // correct the deviation between OpenCV's face rectangle and Dlib's.
            rect.y += (int)(rect.height * 0.1f);

            // detect landmark points
            points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

            if (enableOpticalFlowFilter) {
                opticalFlowFilter.Process(bgraMat, points, points, false);
            }

            if (displayCameraPreview && bgraMat4preview != null) {
                // draw landmark points
                OpenCVForUnityUtils.DrawFaceLandmark(bgraMat4preview, points, new Scalar(0, 255, 0, 255), 2);
            }
        }
    }

    UnityEngine.WSA.Application.InvokeOnAppThread(() => {
        if (!webCamTextureToMatHelper.IsPlaying()) {
            return;
        }

        if (displayCameraPreview && bgraMat4preview != null) {
            OpenCVForUnity.Utils.fastMatToTexture2D(bgraMat4preview, texture);
        }

        if (points != null) {
            UpdateARHeadTransform(points, cameraToWorldMatrix);
        }

        bgraMat.Dispose();
        if (bgraMat4preview != null) {
            bgraMat4preview.Dispose();
        }
    }, false);
}
// Update is called once per frame
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {

        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist(grayMat, grayMat);

        if (cascade != null) {
            cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                new Size(grayMat.cols() * 0.2, grayMat.rows() * 0.2), new Size());
        }

        OpenCVForUnity.Rect[] rects = faces.toArray();
        for (int i = 0; i < rects.Length; i++) {
            Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);

            // make the coordinates for the rectangle accessible
            elPosX = rects [i].x + rects [i].width;
            elPosY = rects [i].y + rects [i].height;
        }

        if (rects.Length > 0) {
            // yay, we see a face
            Debug.Log("Update: Face detected!!!");

            // start the party (once)
            //partyObject.SetActive (true);

            // set the hat graphic true
            // TODO: need to figure out a way for this to not flicker so much
            //m_partyHat.SetActive (true);

            // update the position of the hat
            // TODO: need to provide an offset based on the size of the rectangle (i think!)
            m_partyHat.transform.position = new Vector3(elPosX, elPosY, 0);
        } else {
            // set the hat graphic false
            Debug.Log("Update: No face yet!");
            //m_partyHat.SetActive (false);
        }

        //float elPosX = rects [0].x;
        //float elPosX = 400f;
        //float elPosY = rects [0].y;
        //float elPosY = 100f;
        //Debug.Log (elPosX + ", " + elPosY);
        //m_partyHat.transform.position = new Vector3 (elPosX, elPosY, 0);

        // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
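The hat position above is set straight from OpenCV pixel coordinates, which only lines up with Unity world space under the quad-plus-orthographic-camera layout these samples use. A hedged sketch of the mapping that would address the TODO; this is an assumption about the scene setup, not code from the sample: re-center on the frame and flip Y, since OpenCV's origin is the top-left corner while Unity's Y axis points up.

// Sketch (assumed scene layout): OpenCV pixel coords to Unity world coords
// for a quad scaled to the frame size and an orthographic camera of size height/2.
using UnityEngine;

public static class PixelToWorldSketch
{
    public static Vector3 ToWorld(float px, float py, int frameWidth, int frameHeight)
    {
        return new Vector3(px - frameWidth / 2f, (frameHeight / 2f) - py, 0f);
    }
}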
// Update is called once per frame
void Update ()
{
    if (!initDone)
        return;

    if (screenOrientation != Screen.orientation) {
        screenOrientation = Screen.orientation;
        updateLayout ();
    }

    #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
    #else
    if (webCamTexture.didUpdateThisFrame) {
    #endif

        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

        // flip to correct direction.
        if (webCamDevice.isFrontFacing) {
            if (webCamTexture.videoRotationAngle == 0) {
                Core.flip (rgbaMat, rgbaMat, 1);
            } else if (webCamTexture.videoRotationAngle == 90) {
                Core.flip (rgbaMat, rgbaMat, 0);
            }
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip (rgbaMat, rgbaMat, 0);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip (rgbaMat, rgbaMat, 1);
            }
        } else {
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip (rgbaMat, rgbaMat, -1);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip (rgbaMat, rgbaMat, -1);
            }
        }

        // convert image to greyscale
        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (faceTracker.getPoints ().Count <= 0) {
            Debug.Log ("detectFace");

            // equalize the greyscale image before detection
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2,
                    0 // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                if (faces.rows () > 0) {
                    Debug.Log ("faces " + faces.dump ());

                    // add initial face points from MatOfRect
                    faceTracker.addPoints (faces);

                    // draw face rect
                    OpenCVForUnity.Rect[] rects = faces.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
                        Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                    }
                }
            }
        }

        // track face points. If face points <= 0, track() always returns false.
        if (faceTracker.track (grayMat, faceTrackerParams))
            faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

        Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

        Utils.matToTexture2D (rgbaMat, texture, colors);
    }

    if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset ();
    }
}

void OnDisable ()
{
    webCamTexture.Stop ();
}
// Update is called once per frame
void Update()
{
    if (!initDone) {
        return;
    }

    if (screenOrientation != Screen.orientation) {
        screenOrientation = Screen.orientation;
        updateLayout();
    }

    #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
    #else
    if (webCamTexture.didUpdateThisFrame) {
    #endif

        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

        // flip to correct direction.
        if (webCamDevice.isFrontFacing) {
            if (webCamTexture.videoRotationAngle == 0) {
                Core.flip(rgbaMat, rgbaMat, 1);
            } else if (webCamTexture.videoRotationAngle == 90) {
                Core.flip(rgbaMat, rgbaMat, 0);
            }
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip(rgbaMat, rgbaMat, 0);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip(rgbaMat, rgbaMat, 1);
            }
        } else {
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip(rgbaMat, rgbaMat, -1);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip(rgbaMat, rgbaMat, -1);
            }
        }

        // convert image to grayscale
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (faceTracker.getPoints().Count <= 0) {
            Debug.Log("detectFace");

            // equalize the grayscale image before detection
            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect()) {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2,
                    0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                if (faces.rows() > 0) {
                    Debug.Log("--------------------------faces");
                    Debug.Log("faces " + faces.dump());
                    Debug.Log("--------------------------faces");

                    // add initial face points from MatOfRect
                    faceTracker.addPoints(faces);

                    // draw face rect
                    OpenCVForUnity.Rect[] rects = faces.toArray();
                    for (int i = 0; i < rects.Length; i++) {
                        Core.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
                    }
                }
            }
        }

        // track face points. If face points <= 0, track() always returns false.
        if (faceTracker.track(grayMat, faceTrackerParams)) {
            if (isDrawPoints) {
                //Debug.Log("--------------------------100-----------------------------------");
                faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
            }

            //Debug.Log("--------------------------1000");
            //Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

            Point[] points = faceTracker.getPoints()[0];
            //eyeWidth = points[36] - points[31];

            /*
             * Debug.Log("--------------------------0");
             * Debug.Log(points[31]);
             * Debug.Log("--------------------------2");
             * Debug.Log(points[36]);
             * Debug.Log("--------------------------1");
             */

            if (points.Length > 0) {
                imagePoints.fromArray(
                    points[31], // l eye
                    points[36], // r eye
                    points[67], // nose
                    points[48], // l mouth
                    points[54]  // r mouth
                );

                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                bool isRefresh = false;

                if (tvec.get(2, 0)[0] > 0 && tvec.get(2, 0)[0] < 1200 * ((float)webCamTexture.width / (float)width)) {
                    isRefresh = true;

                    if (oldRvec == null) {
                        oldRvec = new Mat();
                        rvec.copyTo(oldRvec);
                    }
                    if (oldTvec == null) {
                        oldTvec = new Mat();
                        tvec.copyTo(oldTvec);
                    }

                    // filter Rvec noise.
                    using (Mat absDiffRvec = new Mat()) {
                        Core.absdiff(rvec, oldRvec, absDiffRvec);
                        using (Mat cmpRvec = new Mat()) {
                            Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
                            if (Core.countNonZero(cmpRvec) > 0) {
                                isRefresh = false;
                            }
                        }
                    }

                    // filter Tvec noise.
                    using (Mat absDiffTvec = new Mat()) {
                        Core.absdiff(tvec, oldTvec, absDiffTvec);
                        using (Mat cmpTvec = new Mat()) {
                            Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
                            if (Core.countNonZero(cmpTvec) > 0) {
                                isRefresh = false;
                            }
                        }
                    }
                }

                if (isRefresh) {
                    if (!rightEye.activeSelf) {
                        rightEye.SetActive(true);
                    }
                    if (!leftEye.activeSelf) {
                        leftEye.SetActive(true);
                    }

                    rvec.copyTo(oldRvec);
                    tvec.copyTo(oldTvec);

                    Calib3d.Rodrigues(rvec, rotM);

                    transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0)[0], (float)rotM.get(0, 1)[0], (float)rotM.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                    transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0)[0], (float)rotM.get(1, 1)[0], (float)rotM.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                    transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0)[0], (float)rotM.get(2, 1)[0], (float)rotM.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                    modelViewMtrx = lookAtM * transformationM * invertZM;
                    ARCamera.worldToCameraMatrix = modelViewMtrx;
                }
            }
        }

        Utils.matToTexture2D(rgbaMat, texture, colors);
    }

    if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset();

        if (oldRvec != null) {
            oldRvec.Dispose();
            oldRvec = null;
        }
        if (oldTvec != null) {
            oldTvec.Dispose();
            oldTvec = null;
        }

        ARCamera.ResetWorldToCameraMatrix();

        rightEye.SetActive(false);
        leftEye.SetActive(false);
    }
}

void OnDisable()
{
    webCamTexture.Stop();
}
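The rvec/tvec gate above accepts a new solvePnP pose only when no component jumped by more than the configured range since the last accepted pose. The same test factored into a self-contained helper; a sketch, since the samples inline it:

// Sketch of the pose-noise gate: true if every component of |current - previous|
// stays within range.
using OpenCVForUnity;

public static class PoseNoiseGateSketch
{
    public static bool WithinRange(Mat current, Mat previous, double range)
    {
        using (Mat absDiff = new Mat())
        using (Mat cmp = new Mat())
        {
            Core.absdiff(current, previous, absDiff);               // |current - previous|
            Core.compare(absDiff, new Scalar(range), cmp, Core.CMP_GT);
            return Core.countNonZero(cmp) == 0;                     // no component exceeds range
        }
    }
}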
public override void UpdateValue()
{
    if (cascade == null) {
        return;
    }

    if (matSourceGetterInterface == null) {
        return;
    }

    didUpdateFaceRect = false;

    Mat rgbaMat = (useDownScaleMat) ? matSourceGetterInterface.GetDownScaleMatSource() : matSourceGetterInterface.GetMatSource();
    if (rgbaMat != null) {
        if (isDebugMode && screen != null) {
            if (debugMat != null && (debugMat.width() != rgbaMat.width() || debugMat.height() != rgbaMat.height())) {
                debugMat.Dispose();
                debugMat = null;
            }
            debugMat = debugMat ?? new Mat(rgbaMat.rows(), rgbaMat.cols(), rgbaMat.type());

            if (hideImage) {
                debugMat.setTo(new Scalar(0, 0, 0, 255));
            } else {
                rgbaMat.copyTo(debugMat);
            }

            if (debugTexture != null && (debugTexture.width != debugMat.width() || debugTexture.height != debugMat.height())) {
                Texture2D.Destroy(debugTexture);
                debugTexture = null;
            }
            if (debugTexture == null) {
                debugTexture = new Texture2D(debugMat.width(), debugMat.height(), TextureFormat.RGBA32, false, false);

                Vector2 size = screen.rectTransform.sizeDelta;
                screen.rectTransform.sizeDelta = new Vector2(size.x, size.x * (float)debugMat.height() / (float)debugMat.width());
            }

            if (debugColors != null && debugColors.Length != debugMat.width() * debugMat.height()) {
                debugColors = new Color32[debugMat.width() * debugMat.height()];
            }

            screen.texture = debugTexture;
            screen.enabled = true;
        } else {
            if (screen != null) {
                screen.enabled = false;
            }
        }

        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist(grayMat, grayMat);

        if (cascade != null) {
            cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                new Size(grayMat.cols() * 0.2, grayMat.rows() * 0.2), new Size());
        }

        Rect[] rects = faces.toArray();
        for (int i = 0; i < rects.Length; i++) {
            if (i == 0) {
                Rect r = rects [i];
                if (useDownScaleMat) {
                    // restore the rect to its original size
                    float downscaleRatio = matSourceGetterInterface.GetDownScaleRatio();
                    faceRect = new UnityEngine.Rect(
                        r.x * downscaleRatio,
                        r.y * downscaleRatio,
                        r.width * downscaleRatio,
                        r.height * downscaleRatio);
                } else {
                    faceRect = new UnityEngine.Rect(r.x, r.y, r.width, r.height);
                }
                didUpdateFaceRect = true;

                //Debug.Log ("detect faces " + rects [i]);

                if (isDebugMode && screen != null) {
                    Imgproc.rectangle(debugMat, new Point(r.x, r.y), new Point(r.x + r.width, r.y + r.height), new Scalar(255, 0, 0, 255), 2);
                }
            }
        }

        //Imgproc.putText (debugMat, "W:" + debugMat.width () + " H:" + debugMat.height () + " SO:" + Screen.orientation, new Point (5, debugMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

        if (isDebugMode && screen != null) {
            OpenCVForUnity.UnityUtils.Utils.matToTexture2D(debugMat, debugTexture, debugColors);
        }
    }
}
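Because detection may run on a downscaled source Mat, the resulting rect has to be multiplied back by the downscale ratio before it is published as faceRect. A minimal sketch of that restore step, fully qualified so it stands alone:

// Sketch: map a rect detected on the downscaled Mat back to source-resolution
// coordinates, as UpdateValue() does before publishing faceRect.
static UnityEngine.Rect RestoreRect(OpenCVForUnity.Rect r, float downScaleRatio)
{
    return new UnityEngine.Rect(r.x * downScaleRatio, r.y * downScaleRatio,
                                r.width * downScaleRatio, r.height * downScaleRatio);
}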
// Update is called once per frame
void Update ()
{
    if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) {

        Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

        // convert image to greyscale
        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (faceTracker.getPoints ().Count <= 0) {
            Debug.Log ("detectFace");

            // equalize the greyscale image before detection
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2,
                    0 // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                if (faces.rows () > 0) {
                    Debug.Log ("faces " + faces.dump ());

                    // add initial face points from MatOfRect
                    faceTracker.addPoints (faces);

                    // draw face rect
                    OpenCVForUnity.Rect[] rects = faces.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
                        #if OPENCV_3
                        Imgproc.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
                        #else
                        Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                        #endif
                    }
                }
            }
        }

        // track face points. If face points <= 0, track() always returns false.
        if (faceTracker.track (grayMat, faceTrackerParams))
            faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

        #if OPENCV_3
        Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        #else
        Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);
        #endif

        // Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

        Utils.matToTexture2D (rgbaMat, texture, colors);
    }

    if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset ();
    }
}
private IEnumerator init ()
{
    if (webCamTexture != null) {
        faceTracker.reset ();

        webCamTexture.Stop ();
        initDone = false;

        rgbaMat.Dispose ();
        grayMat.Dispose ();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play ();

    while (true) {
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
        #else
        if (webCamTexture.didUpdateThisFrame) {
        #endif

            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];

            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
            if (cascade.empty ()) {
                Debug.LogError ("cascade file is not loaded. Please copy it from “FaceTrackerSample/StreamingAssets/” to the “Assets/StreamingAssets/” folder.");
            }

            gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

            gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            // bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            Camera.main.orthographicSize = webCamTexture.height / 2;

            initDone = true;

            break;
        } else {
            yield return 0;
        }
    }
}

// Update is called once per frame
void Update ()
{
    if (!initDone)
        return;

    #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
    #else
    if (webCamTexture.didUpdateThisFrame) {
    #endif

        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

        // flip to correct direction.
        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                }
            }
        }

        // convert image to greyscale
        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (faceTracker.getPoints ().Count <= 0) {
            Debug.Log ("detectFace");

            // equalize the greyscale image before detection
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2,
                    0 // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                if (faces.rows () > 0) {
                    Debug.Log ("faces " + faces.dump ());

                    // add initial face points from MatOfRect
                    faceTracker.addPoints (faces);

                    // draw face rect
                    OpenCVForUnity.Rect[] rects = faces.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
                        Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                    }
                }
            }
        }

        // track face points. If face points <= 0, track() always returns false.
        if (faceTracker.track (grayMat, faceTrackerParams))
            faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

        Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

        Utils.matToTexture2D (rgbaMat, texture, colors);
    }

    if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset ();
    }
}

void OnDisable ()
{
    webCamTexture.Stop ();
}

void OnGUI ()
{
    float screenScale = Screen.height / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical ();
    if (GUILayout.Button ("back")) {
        Application.LoadLevel ("FaceTrackerSample");
    }
    if (GUILayout.Button ("change camera")) {
        isFrontFacing = !isFrontFacing;
        StartCoroutine (init ());
    }
    GUILayout.EndVertical ();
}
}
}
// Update is called once per frame
void Update()
{
    lock (sync) {
        while (ExecuteOnMainThread.Count > 0) {
            ExecuteOnMainThread.Dequeue().Invoke();
        }
    }

    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {

        Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist(grayMat, grayMat);

        if (enableDetection && !isDetecting) {
            isDetecting = true;
            grayMat.copyTo(grayMat4Thread);
            StartThread(ThreadWorker);
        }

        Rect[] rects;
        if (!useSeparateDetection) {
            if (hasUpdatedDetectionResult) {
                hasUpdatedDetectionResult = false;
                rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
            }

            rectangleTracker.GetObjects(resultObjects, true);
            rects = resultObjects.ToArray();
            // rects = rectangleTracker.CreateCorrectionBySpeedOfRects ();

            DrawRects(rects, grayMat.width(), grayMat.height());
        } else {
            if (hasUpdatedDetectionResult) {
                hasUpdatedDetectionResult = false;
                //Debug.Log("process: get rectsWhereRegions from detectionResult");
                rectsWhereRegions = detectionResult.toArray();

                // rects = rectsWhereRegions;
                // for (int i = 0; i < rects.Length; i++) {
                //     Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 0, 255, 255), 1);
                // }
            } else {
                //Debug.Log("process: get rectsWhereRegions from previous positions");
                rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();

                // rects = rectsWhereRegions;
                // for (int i = 0; i < rects.Length; i++) {
                //     Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 255, 0, 255), 1);
                // }
            }

            detectedObjectsInRegions.Clear();
            if (rectsWhereRegions.Length > 0) {
                int len = rectsWhereRegions.Length;
                for (int i = 0; i < len; i++) {
                    DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                }
            }

            rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
            rectangleTracker.GetObjects(resultObjects, true);
            rects = resultObjects.ToArray();

            DrawRects(rects, grayMat.width(), grayMat.height());
        }
    }

    if (webCamTextureToMatHelper.IsPlaying()) {

        Matrix4x4 cameraToWorldMatrix = Camera.main.cameraToWorldMatrix;

        Vector3 ccCameraSpacePos = UnProjectVector(projectionMatrix, new Vector3(0.0f, 0.0f, overlayDistance));
        Vector3 tlCameraSpacePos = UnProjectVector(projectionMatrix, new Vector3(-overlayDistance, overlayDistance, overlayDistance));

        // position
        Vector3 position = cameraToWorldMatrix.MultiplyPoint3x4(ccCameraSpacePos);
        gameObject.transform.position = position;

        // scale
        Vector3 scale = new Vector3(Mathf.Abs(tlCameraSpacePos.x - ccCameraSpacePos.x) * 2, Mathf.Abs(tlCameraSpacePos.y - ccCameraSpacePos.y) * 2, 1);
        gameObject.transform.localScale = scale;

        // Rotate the canvas object so that it faces the user.
        Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));
        gameObject.transform.rotation = rotation;

        rectOverlay.UpdateOverlayTransform(gameObject.transform);
    }
}
public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
{
    Imgproc.cvtColor(bgraMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
    Imgproc.equalizeHist(grayMat, grayMat);

    if (enableDetection && !isDetecting) {
        isDetecting = true;

        grayMat.copyTo(grayMat4Thread);

        System.Threading.Tasks.Task.Run(() => {
            isThreadRunning = true;
            DetectObject();
            isThreadRunning = false;
            OnDetectionDone();
        });
    }

    if (!displayCameraImage) {
        // fill all black.
        Imgproc.rectangle(bgraMat, new Point(0, 0), new Point(bgraMat.width(), bgraMat.height()), new Scalar(0, 0, 0, 0), -1);
    }

    Rect[] rects;
    if (!useSeparateDetection) {
        if (hasUpdatedDetectionResult) {
            hasUpdatedDetectionResult = false;
            lock (rectangleTracker) {
                rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
            }
        }

        lock (rectangleTracker) {
            rectangleTracker.GetObjects(resultObjects, true);
        }
        rects = resultObjects.ToArray();

        for (int i = 0; i < rects.Length; i++) {
            //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
            //    Debug.Log ("detected face[" + i + "] " + rects [i]);
            //}, true);
            Imgproc.rectangle(bgraMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 3);
        }
    } else {
        if (hasUpdatedDetectionResult) {
            hasUpdatedDetectionResult = false;

            //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
            //    Debug.Log("process: get rectsWhereRegions from detectionResult");
            //}, true);

            lock (rectangleTracker) {
                rectsWhereRegions = detectionResult.toArray();
            }

            rects = rectsWhereRegions;
            for (int i = 0; i < rects.Length; i++) {
                Imgproc.rectangle(bgraMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 1);
            }
        } else {
            //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
            //    Debug.Log("process: get rectsWhereRegions from previous positions");
            //}, true);

            lock (rectangleTracker) {
                rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
            }

            rects = rectsWhereRegions;
            for (int i = 0; i < rects.Length; i++) {
                Imgproc.rectangle(bgraMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 255, 0, 255), 1);
            }
        }

        detectedObjectsInRegions.Clear();
        if (rectsWhereRegions.Length > 0) {
            int len = rectsWhereRegions.Length;
            for (int i = 0; i < len; i++) {
                DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
            }
        }

        lock (rectangleTracker) {
            rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
            rectangleTracker.GetObjects(resultObjects, true);
        }

        rects = resultObjects.ToArray();
        for (int i = 0; i < rects.Length; i++) {
            //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
            //    Debug.Log ("detected face[" + i + "] " + rects [i]);
            //}, true);
            Imgproc.rectangle(bgraMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 3);
        }
    }

    UnityEngine.WSA.Application.InvokeOnAppThread(() => {
        if (!webCamTextureToMatHelper.IsPlaying()) {
            return;
        }

        Utils.fastMatToTexture2D(bgraMat, texture);
        bgraMat.Dispose();

        Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;
        quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

        // Position the canvas object slightly in front of the real world web camera.
        Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);
        position *= 1.2f;

        // Rotate the canvas object so that it faces the user.
        Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

        gameObject.transform.position = position;
        gameObject.transform.rotation = rotation;
    }, false);
}
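The placement math above reads the camera pose straight out of cameraToWorldMatrix: column 3 is the camera position and column 2 its backward axis, so subtracting column 2 pushes the quad out in front of the gaze (the sample then scales the summed vector by 1.2). A sketch of the same idea with an explicit distance parameter; this is an assumption, not the sample's exact scaling:

// Sketch: place a quad in front of the camera using the pose encoded in
// cameraToWorldMatrix (column 3 = position, column 2 = backward, column 1 = up).
using UnityEngine;

public static class QuadPlacementSketch
{
    public static void Place(Transform quad, Matrix4x4 cameraToWorld, float distance)
    {
        Vector3 camPos = cameraToWorld.GetColumn(3);
        Vector3 camBack = cameraToWorld.GetColumn(2);    // the camera looks along -column2
        quad.position = camPos - camBack * distance;     // in front of the camera
        quad.rotation = Quaternion.LookRotation(-cameraToWorld.GetColumn(2),
                                                cameraToWorld.GetColumn(1));
    }
}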
// Update is called once per frame
void Update()
{
    if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame()) {

        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // convert image to greyscale
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (faceTracker.getPoints().Count <= 0) {
            Debug.Log("detectFace");

            // equalize the greyscale image before detection
            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect()) {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2,
                    0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                if (faces.rows() > 0) {
                    Debug.Log("faces " + faces.dump());

                    // add initial face points from MatOfRect
                    faceTracker.addPoints(faces);

                    // draw face rect
                    OpenCVForUnity.Rect[] rects = faces.toArray();
                    for (int i = 0; i < rects.Length; i++) {
                        #if OPENCV_2
                        Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                        #else
                        Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                        #endif
                    }
                }
            }
        }

        // track face points. If face points <= 0, track() always returns false.
        if (faceTracker.track(grayMat, faceTrackerParams)) {
            if (shouldDrawFacePoints) {
                faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
            }

            #if OPENCV_2
            Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
            #else
            Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            #endif

            Point[] points = faceTracker.getPoints() [0];

            if (points.Length > 0) {
                // for (int i = 0; i < points.Length; i++) {
                //     #if OPENCV_2
                //     Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
                //     #else
                //     Imgproc.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
                //     #endif
                // }

                imagePoints.fromArray(
                    points [31], // l eye
                    points [36], // r eye
                    points [67], // nose
                    points [48], // l mouth
                    points [54]  // r mouth
                    // ,
                    // points [1],  // l ear
                    // points [13]  // r ear
                );

                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                bool isRefresh = false;

                if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)rgbaMat.cols() / (float)webCamTextureToMatHelper.requestWidth)) {
                    isRefresh = true;

                    if (oldRvec == null) {
                        oldRvec = new Mat();
                        rvec.copyTo(oldRvec);
                    }
                    if (oldTvec == null) {
                        oldTvec = new Mat();
                        tvec.copyTo(oldTvec);
                    }

                    // filter Rvec noise.
                    using (Mat absDiffRvec = new Mat()) {
                        Core.absdiff(rvec, oldRvec, absDiffRvec);
                        // Debug.Log ("absDiffRvec " + absDiffRvec.dump());
                        using (Mat cmpRvec = new Mat()) {
                            Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
                            if (Core.countNonZero(cmpRvec) > 0) {
                                isRefresh = false;
                            }
                        }
                    }

                    // filter Tvec noise.
                    using (Mat absDiffTvec = new Mat()) {
                        Core.absdiff(tvec, oldTvec, absDiffTvec);
                        using (Mat cmpTvec = new Mat()) {
                            Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
                            if (Core.countNonZero(cmpTvec) > 0) {
                                isRefresh = false;
                            }
                        }
                    }
                }

                if (isRefresh) {
                    if (shouldDrawEffects) {
                        rightEye.SetActive(true);
                        leftEye.SetActive(true);
                    }
                    if (shouldDrawHead) {
                        head.SetActive(true);
                    }
                    if (shouldDrawAxes) {
                        axes.SetActive(true);
                    }

                    if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2
                        && Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9)
                        || Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7) {
                        if (shouldDrawEffects) {
                            mouth.SetActive(true);
                        }
                    } else {
                        if (shouldDrawEffects) {
                            mouth.SetActive(false);
                        }
                    }

                    rvec.copyTo(oldRvec);
                    tvec.copyTo(oldTvec);

                    Calib3d.Rodrigues(rvec, rotM);

                    transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                    transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                    transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                    if (shouldMoveARCamera) {
                        if (ARGameObject != null) {
                            ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
                            ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                            ARGameObject.SetActive(true);
                        }
                    } else {
                        ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
                        if (ARGameObject != null) {
                            ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                            ARGameObject.SetActive(true);
                        }
                    }
                }
            }
        }

        // Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

        Utils.matToTexture2D(rgbaMat, texture, colors);
    }

    if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset();

        if (oldRvec != null) {
            oldRvec.Dispose();
            oldRvec = null;
        }
        if (oldTvec != null) {
            oldTvec.Dispose();
            oldTvec = null;
        }

        rightEye.SetActive(false);
        leftEye.SetActive(false);
        head.SetActive(false);
        mouth.SetActive(false);
        axes.SetActive(false);
    }
}
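The invertYM/invertZM sandwich above is the usual OpenCV-to-Unity hand-off: OpenCV's camera frame is right-handed with Y down and Z forward, so the solvePnP pose is conjugated with axis-flip matrices before it drives a Unity transform. A compact sketch of the composition used in the shouldMoveARCamera == false branch:

// Sketch: convert an OpenCV pose matrix into a Unity world-space matrix by
// flipping Y before and Z after the pose, then composing with the AR camera.
using UnityEngine;

public static class PoseConversionSketch
{
    static readonly Matrix4x4 invertY = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    static readonly Matrix4x4 invertZ = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

    public static Matrix4x4 ToUnityWorld(Matrix4x4 cameraLocalToWorld, Matrix4x4 openCvPose)
    {
        // same composition as ARM = ARCamera...localToWorldMatrix * invertYM * transformationM * invertZM
        return cameraLocalToWorld * invertY * openCvPose * invertZ;
    }
}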