void Update()
{
    if (rawImage) {
        if (startCVCam && cvCameraMat && cvCameraMat.isPlaying() && cvCameraMat.didUpdateThisFrame() && texture != null) {
            Mat cvCamMat = cvCameraMat.GetMat();
            if (cvCamMat != null && !cvCamMat.empty() && !faceCascade.empty()) {
                Mat grayMat = new Mat();
                Imgproc.cvtColor(cvCamMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Mat equalizeHistMat = new Mat();
                Imgproc.equalizeHist(grayMat, equalizeHistMat);
                MatOfRect faces = new MatOfRect();
                faceCascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.13, equalizeHistMat.cols() * 0.13), new Size());
                if (faces.rows() > 0) {
                    List<OpenCVForUnity.Rect> rectsList = faces.toList();
                    for (int i = 0; i < rectsList.Count; i++) {
                        OpenCVForUnity.Rect faceRect = rectsList[i];
                        x = faceRect.x;
                        y = faceRect.y;
                        if (i > 0) {
                            // top-left is (0, 0), bottom-right is (100, 100)
                            OpenCVForUnity.Rect beforeFaceRect = rectsList[i - 1];
                            formerX = beforeFaceRect.x;
                            formerY = beforeFaceRect.y;
                            dx = x - formerX;
                            dy = y - formerY;
                            _gameManager.setDxDy(dx, dy);
                            Debug.Log(x + ":" + y);
                        }
                    }
                }
            }
            if (cvCamMat != null && !cvCamMat.empty()) {
                try {
                    cvCameraMat.matToTexture2D(cvCamMat, texture);
                } catch (System.ArgumentException e) {
                    Debug.Log(e.Message);
                } catch (System.Exception) {
                    Debug.Log("OtherError");
                }
                cvCamMat = null;
            }
        }
    } else {
        Debug.LogError("NotFound:rawImage");
    }
}
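Note that the loop above compares rectsList[i] against rectsList[i - 1], i.e. two different detections from the same frame, so dx/dy measure the offset between neighbouring faces rather than motion over time. If frame-to-frame movement of a single face is what is intended, a minimal sketch could look like the following; it reuses the existing _gameManager.setDxDy hook, while previousFaceRect and ReportFaceDelta are hypothetical names added for illustration:

// Hypothetical field: the rect of the tracked face from the previous frame.
private OpenCVForUnity.Rect previousFaceRect;

private void ReportFaceDelta(OpenCVForUnity.Rect faceRect)
{
    if (previousFaceRect != null) {
        // Delta of the rect origin between the previous frame and this one.
        int dx = faceRect.x - previousFaceRect.x;
        int dy = faceRect.y - previousFaceRect.y;
        _gameManager.setDxDy(dx, dy);
    }
    previousFaceRect = faceRect;
}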
public List<Point[]> detect (Mat im, float scaleFactor, int minNeighbours, OpenCVForUnity.Size minSize) { //convert image to greyscale Mat gray = null; if (im.channels () == 1) { gray = im; } else { gray = new Mat (); Imgproc.cvtColor (im, gray, Imgproc.COLOR_RGBA2GRAY); } using (Mat equalizeHistMat = new Mat ()) using (MatOfRect faces = new MatOfRect ()) { Imgproc.equalizeHist (gray, equalizeHistMat); detector.detectMultiScale (equalizeHistMat, faces, scaleFactor, minNeighbours, 0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, minSize, new Size ()); if (faces.rows () < 1) { return new List<Point[]> (); } return convertMatOfRectToPoints (faces); } }
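convertMatOfRectToPoints is called in the detect method above but is not shown. A minimal sketch consistent with how it is used (one Point[] per detected rect) follows; mapping each rect to its four corner points is an assumption, not necessarily the sample's exact implementation:

// Hypothetical sketch: convert each detected rect into an array of its corner points.
private List<Point[]> convertMatOfRectToPoints (MatOfRect rects)
{
    List<Point[]> points = new List<Point[]> ();
    foreach (OpenCVForUnity.Rect r in rects.toArray ()) {
        // top-left, top-right, bottom-right, bottom-left
        points.Add (new Point[] {
            new Point (r.x, r.y),
            new Point (r.x + r.width, r.y),
            new Point (r.x + r.width, r.y + r.height),
            new Point (r.x, r.y + r.height)
        });
    }
    return points;
}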
// Use this for initialization
void Start ()
{
    //initialize FaceTracker
    FaceTracker faceTracker = new FaceTracker (Utils.getFilePath ("tracker_model.json"));
    //initialize FaceTrackerParams
    FaceTrackerParams faceTrackerParams = new FaceTrackerParams ();

    gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
    Camera.main.orthographicSize = imgTexture.height / 2;

    Mat imgMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    Utils.texture2DToMat (imgTexture, imgMat);
    Debug.Log ("imgMat dst ToString " + imgMat.ToString ());

    CascadeClassifier cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
    if (cascade.empty ()) {
        Debug.LogError ("cascade file is not loaded. Please copy it from “FaceTrackerSample/StreamingAssets/” to the “Assets/StreamingAssets/” folder.");
    }

    //convert image to greyscale
    Mat gray = new Mat ();
    Imgproc.cvtColor (imgMat, gray, Imgproc.COLOR_RGBA2GRAY);

    MatOfRect faces = new MatOfRect ();
    Imgproc.equalizeHist (gray, gray);
    cascade.detectMultiScale (gray, faces, 1.1f, 2, 0
        // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
        | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (gray.cols () * 0.05, gray.cols () * 0.05), new Size ());
    Debug.Log ("faces " + faces.dump ());
    if (faces.rows () > 0) {
        //add initial face points from MatOfRect
        faceTracker.addPoints (faces);
    }

    //track face points. If face points <= 0, track() always returns false.
    if (faceTracker.track (imgMat, faceTrackerParams))
        faceTracker.draw (imgMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

    Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);
    Utils.matToTexture2D (imgMat, texture);
    gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
}
// Update is called once per frame
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();
        //convert image to greyscale
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        if (isAutoResetMode || faceTracker.getPoints().Count <= 0) {
            // Debug.Log ("detectFace");
            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect()) {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);
                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                    // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());
                if (faces.rows() > 0) {
                    // Debug.Log ("faces " + faces.dump ());
                    List<OpenCVForUnity.Rect> rectsList = faces.toList();
                    List<Point[]> pointsList = faceTracker.getPoints();
                    if (isAutoResetMode) {
                        //add initial face points from MatOfRect
                        if (pointsList.Count <= 0) {
                            faceTracker.addPoints(faces);
                            // Debug.Log ("reset faces ");
                        } else {
                            for (int i = 0; i < rectsList.Count; i++) {
                                OpenCVForUnity.Rect trackRect = new OpenCVForUnity.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                //It determines whether the nose point is included in trackRect.
                                if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67])) {
                                    rectsList.RemoveAt(i);
                                    pointsList.RemoveAt(i);
                                    // Debug.Log ("remove " + i);
                                }
                                Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                            }
                        }
                    } else {
                        faceTracker.addPoints(faces);
                    }
                    //draw face rect
                    for (int i = 0; i < rectsList.Count; i++) {
#if OPENCV_2
                        Core.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
#else
                        Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
#endif
                    }
                } else {
                    if (isAutoResetMode) {
                        faceTracker.reset();
                    }
                }
            }
        }
        //track face points. If face points <= 0, track() always returns false.
        if (faceTracker.track(grayMat, faceTrackerParams)) {
            faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
        }
#if OPENCV_2
        Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
#else
        Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
#endif
        Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
    if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset();
    }
}
/// <summary>
/// Raises the webcam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
// public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
// {
//     Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
// }

// Update is called once per frame
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();
        //convert image to greyscale
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        if (isAutoResetMode || faceTracker.getPoints().Count <= 0) {
            // Debug.Log ("detectFace");
            using (var equalizeHistMat = new Mat())
            using (var faces = new MatOfRect()) {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);
                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());
                if (faces.rows() > 0) {
                    // Debug.Log ("faces " + faces.dump ());
                    List<OpenCVForUnity.Rect> rectsList = faces.toList();
                    List<Point[]> pointsList = faceTracker.getPoints();
                    if (isAutoResetMode) {
                        //add initial face points from MatOfRect
                        if (pointsList.Count <= 0) {
                            faceTracker.addPoints(faces);
                            // Debug.Log ("reset faces ");
                        } else {
                            for (int i = 0; i < rectsList.Count; i++) {
                                var trackRect = new OpenCVForUnity.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                //It determines whether the nose point is included in trackRect.
                                if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67])) {
                                    rectsList.RemoveAt(i);
                                    pointsList.RemoveAt(i);
                                    // Debug.Log ("remove " + i);
                                }
                                Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                            }
                        }
                    } else {
                        faceTracker.addPoints(faces);
                    }
                    // update the list of target meshes
                    {
                        while (targetMeshList.Count < rectsList.Count) {
                            var obj = Instantiate(targetMeshPrefab).GetComponent<MeshRenderer>();
                            obj.transform.rotation = Quaternion.Euler(-90, 0, 0);
                            targetMeshList.Add(obj);
                        }
                        for (int i = targetMeshList.Count - 1; i >= 0; --i) {
                            if (i >= rectsList.Count) {
                                targetMeshList[i].material.color = Color.clear;
                            } else {
                                targetMeshList[i].material.color = Color.red;
                            }
                        }
                    }
                    //draw face rect
                    for (int i = 0; i < rectsList.Count; i++) {
#if OPENCV_2
                        Core.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
#else
                        Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
#endif
                        // move the object to the centre of the face
                        {
                            var rect = rectsList[i];
                            // Debug.Log( string.Format("Rect position ({0}, {1}) Rect size ({2}, {3})", rect.x, rect.y, rect.width, rect.height) );
                            var pos = new Vector2(rect.x - (rect.width / 2),
                                // rect.y + ( rect.height / 2 )
                                rect.y);
                            // move the object
                            targetMeshList[i].transform.localPosition = Vector2ToVector3(pos);
                        }
                    }
                    // move the object to the centre position of the face (disabled)
                    if (false) {
                        for (int i = 0; i < pointsList.Count; ++i) {
                            Vector2 pos;
                            // compute the centre position
                            {
                                double sumX = 0, sumY = 0;
                                for (int j = 0; j < pointsList[i].Length; ++j) {
                                    var point = pointsList[i][j];
                                    sumX += point.x;
                                    sumY += point.y;
                                }
                                var averageX = sumX / pointsList[i].Length;
                                var averageY = sumY / pointsList[i].Length;
                                pos = new Vector2((float)averageX, (float)averageY);
                            }
                            // move the object
                            targetMeshList[i].transform.localPosition = Vector2ToVector3(pos);
                        }
                    }
                } else {
                    if (isAutoResetMode) {
                        faceTracker.reset();
                    }
                }
            }
        }
        //track face points. If face points <= 0, track() always returns false.
        if (faceTracker.track(grayMat, faceTrackerParams)) {
            faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
        }
#if OPENCV_2
        Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
#else
        Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
#endif
        Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
    if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset();
    }
}
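Vector2ToVector3 is used above but not shown. Since OpenCV image coordinates have their origin at the top-left with y pointing down, while the video quad is centred with y pointing up, a plausible sketch is the following; imageWidth and imageHeight are hypothetical fields that would be set once from rgbaMat.cols() and rgbaMat.rows(), and the mapping itself is an assumption:

// Hypothetical fields holding the video frame size in pixels.
private float imageWidth;
private float imageHeight;

private Vector3 Vector2ToVector3(Vector2 imagePos)
{
    // Move the origin from the top-left corner to the image centre and flip y,
    // so the marker lands on the correctly oriented video quad; z = 0 keeps it
    // on the video plane.
    return new Vector3(imagePos.x - imageWidth / 2f, -(imagePos.y - imageHeight / 2f), 0f);
}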
// Update is called once per frame
void Update ()
{
    if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
        //convert image to greyscale
        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        if (faceTracker.getPoints ().Count <= 0) {
            Debug.Log ("detectFace");
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);
                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0
                    // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
                if (faces.rows () > 0) {
                    Debug.Log ("faces " + faces.dump ());
                    //add initial face points from MatOfRect
                    faceTracker.addPoints (faces);
                    //draw face rect
                    OpenCVForUnity.Rect[] rects = faces.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
#if OPENCV_2
                        Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
#else
                        Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
#endif
                    }
                }
            }
        }
        //track face points. If face points <= 0, track() always returns false.
        if (faceTracker.track (grayMat, faceTrackerParams)) {
            faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));
        }
#if OPENCV_2
        Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);
#else
        Imgproc.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
#endif
        Utils.matToTexture2D (rgbaMat, texture, colors);
    }
    if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset ();
    }
}
// Update is called once per frame void Update() { if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) { Mat rgbaMat = webCamTextureToMatHelper.GetMat(); //convert image to greyscale Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY); if (isAutoResetMode || faceTracker.getPoints().Count <= 0) { // Debug.Log ("detectFace"); //convert image to greyscale using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) { Imgproc.equalizeHist(grayMat, equalizeHistMat); cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size()); if (faces.rows() > 0) { // Debug.Log ("faces " + faces.dump ()); List <OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList(); List <Point[]> pointsList = faceTracker.getPoints(); if (isAutoResetMode) { //add initial face points from MatOfRect if (pointsList.Count <= 0) { faceTracker.addPoints(faces); // Debug.Log ("reset faces "); } else { for (int i = 0; i < rectsList.Count; i++) { OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3); //It determines whether nose point has been included in trackRect. if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67])) { rectsList.RemoveAt(i); pointsList.RemoveAt(i); // Debug.Log ("remove " + i); } Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2); } } } else { faceTracker.addPoints(faces); } //draw face rect for (int i = 0; i < rectsList.Count; i++) { Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2); } } else { if (isAutoResetMode) { faceTracker.reset(); rightEye.SetActive(false); leftEye.SetActive(false); head.SetActive(false); mouth.SetActive(false); axes.SetActive(false); } } } } //track face points.if face points <= 0, always return false. if (faceTracker.track(grayMat, faceTrackerParams)) { if (isShowingFacePoints) { faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255)); } Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false); Point[] points = faceTracker.getPoints() [0]; if (points.Length > 0) { // for (int i = 0; i < points.Length; i++) // { // Imgproc.putText(rgbaMat, "" + i, new Point(points [i].x, points [i].y), Imgproc.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar(0, 0, 255, 255), 2, Imgproc.LINE_AA, false); // } imagePoints.fromArray( points [31], //l eye points [36], //r eye points [67], //nose points [48], //l mouth points [54] //r mouth // , // points [0],//l ear // points [14]//r ear ); Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec); bool isRefresh = false; if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)rgbaMat.cols() / (float)webCamTextureToMatHelper.requestedWidth)) { isRefresh = true; if (oldRvec == null) { oldRvec = new Mat(); rvec.copyTo(oldRvec); } if (oldTvec == null) { oldTvec = new Mat(); tvec.copyTo(oldTvec); } //filter Rvec Noise. 
using (Mat absDiffRvec = new Mat()) { Core.absdiff(rvec, oldRvec, absDiffRvec); // Debug.Log ("absDiffRvec " + absDiffRvec.dump()); using (Mat cmpRvec = new Mat()) { Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT); if (Core.countNonZero(cmpRvec) > 0) { isRefresh = false; } } } //filter Tvec Noise. using (Mat absDiffTvec = new Mat()) { Core.absdiff(tvec, oldTvec, absDiffTvec); // Debug.Log ("absDiffRvec " + absDiffRvec.dump()); using (Mat cmpTvec = new Mat()) { Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT); if (Core.countNonZero(cmpTvec) > 0) { isRefresh = false; } } } } if (isRefresh) { if (isShowingEffects) { rightEye.SetActive(true); } if (isShowingEffects) { leftEye.SetActive(true); } if (isShowingHead) { head.SetActive(true); } if (isShowingAxes) { axes.SetActive(true); } if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2 && Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9) || Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7) { if (isShowingEffects) { mouth.SetActive(true); } } else { if (isShowingEffects) { mouth.SetActive(false); } } rvec.copyTo(oldRvec); tvec.copyTo(oldTvec); Calib3d.Rodrigues(rvec, rotM); transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0])); transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0])); transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0])); transformationM.SetRow(3, new Vector4(0, 0, 0, 1)); // right-handed coordinates system (OpenCV) to left-handed one (Unity) ARM = invertYM * transformationM; // Apply Z-axis inverted matrix. ARM = ARM * invertZM; if (shouldMoveARCamera) { if (ARGameObject != null) { ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse; ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM); ARGameObject.SetActive(true); } } else { ARM = ARCamera.transform.localToWorldMatrix * ARM; if (ARGameObject != null) { ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM); ARGameObject.SetActive(true); } } } } } // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false); Utils.fastMatToTexture2D(rgbaMat, texture); } if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) { faceTracker.reset(); if (oldRvec != null) { oldRvec.Dispose(); oldRvec = null; } if (oldTvec != null) { oldTvec.Dispose(); oldTvec = null; } rightEye.SetActive(false); leftEye.SetActive(false); head.SetActive(false); mouth.SetActive(false); axes.SetActive(false); } }
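The rvec/tvec noise gates above repeat one pattern: reject the new pose estimate if any component jumped by more than a threshold since the last accepted pose. A sketch of that check as a reusable helper (same OpenCV calls as above; the method name is hypothetical):

private static bool ExceedsNoiseThreshold(Mat current, Mat previous, double threshold)
{
    using (Mat absDiff = new Mat())
    using (Mat cmp = new Mat())
    {
        // Per-element |current - previous| ...
        Core.absdiff(current, previous, absDiff);
        // ... flagged where it exceeds the threshold ...
        Core.compare(absDiff, new Scalar(threshold), cmp, Core.CMP_GT);
        // ... and any flagged element means the pose jumped too far.
        return Core.countNonZero(cmp) > 0;
    }
}

// Usage, mirroring the code above:
// if (ExceedsNoiseThreshold(rvec, oldRvec, rvecNoiseFilterRange)) isRefresh = false;
// if (ExceedsNoiseThreshold(tvec, oldTvec, tvecNoiseFilterRange)) isRefresh = false;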
private IEnumerator init ()
{
    if (webCamTexture != null) {
        faceTracker.reset ();
        webCamTexture.Stop ();
        initDone = false;
        rgbaMat.Dispose ();
        grayMat.Dispose ();
    }
    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }
    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }
    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
    // Starts the camera
    webCamTexture.Play ();
    while (true) {
        //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1; otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#endif
            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);
            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);
            cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
            if (cascade.empty ()) {
                Debug.LogError ("cascade file is not loaded. Please copy it from “FaceTrackerSample/StreamingAssets/” to the “Assets/StreamingAssets/” folder.");
            }
            gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);
            gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);
            // bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);
            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
            Camera.main.orthographicSize = webCamTexture.height / 2;
            initDone = true;
            break;
        } else {
            yield return 0;
        }
    }
}

// Update is called once per frame
void Update ()
{
    if (!initDone)
        return;
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);
        //flip to correct direction.
        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                }
            }
        }
        //convert image to greyscale
        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        if (faceTracker.getPoints ().Count <= 0) {
            Debug.Log ("detectFace");
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);
                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0
                    // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
                if (faces.rows () > 0) {
                    Debug.Log ("faces " + faces.dump ());
                    //add initial face points from MatOfRect
                    faceTracker.addPoints (faces);
                    //draw face rect
                    OpenCVForUnity.Rect[] rects = faces.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
                        Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                    }
                }
            }
        }
        //track face points. If face points <= 0, track() always returns false.
        if (faceTracker.track (grayMat, faceTrackerParams))
            faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));
        Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);
        Utils.matToTexture2D (rgbaMat, texture, colors);
    }
    if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset ();
    }
}

void OnDisable ()
{
    webCamTexture.Stop ();
}

void OnGUI ()
{
    float screenScale = Screen.height / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;
    GUILayout.BeginVertical ();
    if (GUILayout.Button ("back")) {
        Application.LoadLevel ("FaceTrackerSample");
    }
    if (GUILayout.Button ("change camera")) {
        isFrontFacing = !isFrontFacing;
        StartCoroutine (init ());
    }
    GUILayout.EndVertical ();
}
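The flip-to-correct-direction ladder above recurs in several of these samples. The branches collapse to a single flip code per (mirrored, front-facing, angle) combination, so a consolidated sketch, equivalent to the logic above, could look like this (the method name is hypothetical):

private static void FlipToCorrectDirection (Mat rgbaMat, WebCamTexture webCamTexture, WebCamDevice webCamDevice)
{
    bool mirrored = webCamTexture.videoVerticallyMirrored;
    bool front = webCamDevice.isFrontFacing;
    int angle = webCamTexture.videoRotationAngle;
    // Core.flip codes: 0 = around the x axis, 1 (positive) = around the y axis, -1 = both; 2 is a sentinel for "no flip".
    int flipCode;
    if (mirrored && front)        flipCode = (angle == 0) ? -1 : (angle == 180) ? 0 : 2;
    else if (mirrored && !front)  flipCode = (angle == 180) ? 1 : 2;
    else if (!mirrored && front)  flipCode = (angle == 0) ? 1 : (angle == 180) ? 0 : 2;
    else                          flipCode = (angle == 180) ? -1 : 2;
    if (flipCode != 2)
        Core.flip (rgbaMat, rgbaMat, flipCode);
}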
private IEnumerator init() { axes.SetActive(false); head.SetActive(false); rightEye.SetActive(false); leftEye.SetActive(false); mouth.SetActive(false); if (webCamTexture != null) { faceTracker.reset(); webCamTexture.Stop(); initDone = false; rgbaMat.Dispose(); grayMat.Dispose(); cascade.Dispose(); camMatrix.Dispose(); distCoeffs.Dispose(); } // Checks how many and which cameras are available on the device for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) { if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) { Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing); webCamDevice = WebCamTexture.devices [cameraIndex]; webCamTexture = new WebCamTexture(webCamDevice.name, width, height); break; } } if (webCamTexture == null) { webCamDevice = WebCamTexture.devices [0]; webCamTexture = new WebCamTexture(webCamDevice.name, width, height); } Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS); // Starts the camera webCamTexture.Play(); while (true) { //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/) #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1) if (webCamTexture.width > 16 && webCamTexture.height > 16) { #else if (webCamTexture.didUpdateThisFrame) { #endif Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS); Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing); colors = new Color32[webCamTexture.width * webCamTexture.height]; rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4); grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1); texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false); cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml")); if (cascade.empty()) { Debug.LogError("cascade file is not loaded. Please copy it from “FaceTrackerSample/StreamingAssets/” to the “Assets/StreamingAssets/” folder."); } gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1); gameObject.transform.localEulerAngles = new Vector3(0, 0, 0); // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back); // bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored; // float scaleX = 1; // float scaleY = _videoVerticallyMirrored ? 
-1.0f : 1.0f; // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1); gameObject.GetComponent <Renderer> ().material.mainTexture = texture; Camera.main.orthographicSize = webCamTexture.height / 2; int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols()); camMatrix = new Mat(3, 3, CvType.CV_64FC1); camMatrix.put(0, 0, max_d); camMatrix.put(0, 1, 0); camMatrix.put(0, 2, rgbaMat.cols() / 2.0f); camMatrix.put(1, 0, 0); camMatrix.put(1, 1, max_d); camMatrix.put(1, 2, rgbaMat.rows() / 2.0f); camMatrix.put(2, 0, 0); camMatrix.put(2, 1, 0); camMatrix.put(2, 2, 1.0f); Size imageSize = new Size(rgbaMat.cols(), rgbaMat.rows()); double apertureWidth = 0; double apertureHeight = 0; double[] fovx = new double[1]; double[] fovy = new double[1]; double[] focalLength = new double[1]; Point principalPoint = new Point(); double[] aspectratio = new double[1]; Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio); Debug.Log("imageSize " + imageSize.ToString()); Debug.Log("apertureWidth " + apertureWidth); Debug.Log("apertureHeight " + apertureHeight); Debug.Log("fovx " + fovx [0]); Debug.Log("fovy " + fovy [0]); Debug.Log("focalLength " + focalLength [0]); Debug.Log("principalPoint " + principalPoint.ToString()); Debug.Log("aspectratio " + aspectratio [0]); ARCamera.fieldOfView = (float)fovy [0]; Debug.Log("camMatrix " + camMatrix.dump()); distCoeffs = new MatOfDouble(0, 0, 0, 0); Debug.Log("distCoeffs " + distCoeffs.dump()); lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0)); Debug.Log("lookAt " + lookAtM.ToString()); invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1)); initDone = true; break; } else { yield return(0); } } } // Update is called once per frame void Update() { if (!initDone) { return; } #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1) if (webCamTexture.width > 16 && webCamTexture.height > 16) { #else if (webCamTexture.didUpdateThisFrame) { #endif Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors); //flip to correct direction. 
if (webCamTexture.videoVerticallyMirrored) { if (webCamDevice.isFrontFacing) { if (webCamTexture.videoRotationAngle == 0) { Core.flip(rgbaMat, rgbaMat, -1); } else if (webCamTexture.videoRotationAngle == 180) { Core.flip(rgbaMat, rgbaMat, 0); } } else { if (webCamTexture.videoRotationAngle == 0) { } else if (webCamTexture.videoRotationAngle == 180) { Core.flip(rgbaMat, rgbaMat, 1); } } } else { if (webCamDevice.isFrontFacing) { if (webCamTexture.videoRotationAngle == 0) { Core.flip(rgbaMat, rgbaMat, 1); } else if (webCamTexture.videoRotationAngle == 180) { Core.flip(rgbaMat, rgbaMat, 0); } } else { if (webCamTexture.videoRotationAngle == 0) { } else if (webCamTexture.videoRotationAngle == 180) { Core.flip(rgbaMat, rgbaMat, -1); } } } //convert image to greyscale Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY); if (faceTracker.getPoints().Count <= 0) { Debug.Log("detectFace"); //convert image to greyscale using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) { Imgproc.equalizeHist(grayMat, equalizeHistMat); cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size()); if (faces.rows() > 0) { Debug.Log("faces " + faces.dump()); //add initial face points from MatOfRect faceTracker.addPoints(faces); //draw face rect OpenCVForUnity.Rect[] rects = faces.toArray(); for (int i = 0; i < rects.Length; i++) { Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2); } } } } //track face points.if face points <= 0, always return false. if (faceTracker.track(grayMat, faceTrackerParams)) { if (isDrawPoints) { faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255)); } Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false); Point[] points = faceTracker.getPoints() [0]; if (points.Length > 0) { // for (int i = 0; i < points.Length; i++) { // Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false); // } imagePoints.fromArray( points [31], //l eye points [36], //r eye points [67], //nose points [48], //l mouth points [54] //r mouth // , // points [1],//l ear // points [13]//r ear ); Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec); bool isRefresh = false; if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width)) { isRefresh = true; if (oldRvec == null) { oldRvec = new Mat(); rvec.copyTo(oldRvec); } if (oldTvec == null) { oldTvec = new Mat(); tvec.copyTo(oldTvec); } //filter Rvec Noise. using (Mat absDiffRvec = new Mat()) { Core.absdiff(rvec, oldRvec, absDiffRvec); // Debug.Log ("absDiffRvec " + absDiffRvec.dump()); using (Mat cmpRvec = new Mat()) { Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT); if (Core.countNonZero(cmpRvec) > 0) { isRefresh = false; } } } //filter Tvec Noise. 
using (Mat absDiffTvec = new Mat()) { Core.absdiff(tvec, oldTvec, absDiffTvec); // Debug.Log ("absDiffRvec " + absDiffRvec.dump()); using (Mat cmpTvec = new Mat()) { Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT); if (Core.countNonZero(cmpTvec) > 0) { isRefresh = false; } } } } if (isRefresh) { if (!rightEye.activeSelf) { rightEye.SetActive(true); } if (!leftEye.activeSelf) { leftEye.SetActive(true); } if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2 && Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9) || Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7) { if (!mouth.activeSelf) { mouth.SetActive(true); } } else { if (mouth.activeSelf) { mouth.SetActive(false); } } rvec.copyTo(oldRvec); tvec.copyTo(oldTvec); Calib3d.Rodrigues(rvec, rotM); transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0])); transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0])); transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0])); transformationM.SetRow(3, new Vector4(0, 0, 0, 1)); modelViewMtrx = lookAtM * transformationM * invertZM; ARCamera.worldToCameraMatrix = modelViewMtrx; // Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString()); } } } Utils.matToTexture2D(rgbaMat, texture, colors); } if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) { faceTracker.reset(); if (oldRvec != null) { oldRvec.Dispose(); oldRvec = null; } if (oldTvec != null) { oldTvec.Dispose(); oldTvec = null; } ARCamera.ResetWorldToCameraMatrix(); rightEye.SetActive(false); leftEye.SetActive(false); mouth.SetActive(false); } } void OnDisable() { webCamTexture.Stop(); } private Matrix4x4 getLookAtMatrix(Vector3 pos, Vector3 target, Vector3 up) { Vector3 z = Vector3.Normalize(pos - target); Vector3 x = Vector3.Normalize(Vector3.Cross(up, z)); Vector3 y = Vector3.Normalize(Vector3.Cross(z, x)); Matrix4x4 result = new Matrix4x4(); result.SetRow(0, new Vector4(x.x, x.y, x.z, -(Vector3.Dot(pos, x)))); result.SetRow(1, new Vector4(y.x, y.y, y.z, -(Vector3.Dot(pos, y)))); result.SetRow(2, new Vector4(z.x, z.y, z.z, -(Vector3.Dot(pos, z)))); result.SetRow(3, new Vector4(0, 0, 0, 1)); return(result); } void OnGUI() { float screenScale = Screen.height / 240.0f; Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale)); GUI.matrix = scaledMatrix; GUILayout.BeginVertical(); if (GUILayout.Button("back")) { Application.LoadLevel("FaceTrackerSample"); } if (GUILayout.Button("change camera")) { isFrontFacing = !isFrontFacing; StartCoroutine(init()); } if (GUILayout.Button("drawPoints")) { if (isDrawPoints) { isDrawPoints = false; } else { isDrawPoints = true; } } if (GUILayout.Button("axes")) { if (axes.activeSelf) { axes.SetActive(false); } else { axes.SetActive(true); } } if (GUILayout.Button("head")) { if (head.activeSelf) { head.SetActive(false); } else { head.SetActive(true); } } GUILayout.EndVertical(); } }
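The camera matrix assembled in init() above is an uncalibrated pinhole approximation: fx = fy = max(width, height) and the principal point at the image centre. Gathered into a helper for clarity (same values as above; the method name is hypothetical):

private static Mat CreateApproxCameraMatrix(int imageWidth, int imageHeight)
{
    double f = Mathf.Max(imageWidth, imageHeight); // rough focal length in pixels
    Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, f,   0.0, imageWidth / 2.0,   // [ f 0 cx ]
                        0.0, f,   imageHeight / 2.0,  // [ 0 f cy ]
                        0.0, 0.0, 1.0);               // [ 0 0  1 ]
    return camMatrix;
}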
// Update is called once per frame
void Update()
{
    if (!initDone)
        return;
    if (screenOrientation != Screen.orientation) {
        screenOrientation = Screen.orientation;
        updateLayout();
    }
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);
        // flip to correct direction.
        if (webCamDevice.isFrontFacing) {
            if (webCamTexture.videoRotationAngle == 0) {
                Core.flip(rgbaMat, rgbaMat, 1);
            } else if (webCamTexture.videoRotationAngle == 90) {
                Core.flip(rgbaMat, rgbaMat, 0);
            }
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip(rgbaMat, rgbaMat, 0);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip(rgbaMat, rgbaMat, 1);
            }
        } else {
            if (webCamTexture.videoRotationAngle == 180) {
                Core.flip(rgbaMat, rgbaMat, -1);
            } else if (webCamTexture.videoRotationAngle == 270) {
                Core.flip(rgbaMat, rgbaMat, -1);
            }
        }
        // convert image to greyscale
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        if (faceTracker.getPoints().Count <= 0) {
            Debug.Log("detectFace");
            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect()) {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);
                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());
                if (faces.rows() > 0) {
                    Debug.Log("faces " + faces.dump());
                    // add initial face points from MatOfRect
                    faceTracker.addPoints(faces);
                    // draw face rect
                    OpenCVForUnity.Rect[] rects = faces.toArray();
                    for (int i = 0; i < rects.Length; i++) {
                        Core.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
                    }
                }
            }
        }
        // track face points. If face points <= 0, track() always returns false.
        if (faceTracker.track(grayMat, faceTrackerParams)) {
            if (isDrawPoints)
                faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
            //Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
            Point[] points = faceTracker.getPoints()[0];
            //eyeWidth = points[36] - points[31];
            if (points.Length > 0) {
                imagePoints.fromArray(
                    points[31], //l eye
                    points[36], //r eye
                    points[67], //nose
                    points[48], //l mouth
                    points[54]  //r mouth
                );
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                bool isRefresh = false;
                if (tvec.get(2, 0)[0] > 0 && tvec.get(2, 0)[0] < 1200 * ((float)webCamTexture.width / (float)width)) {
                    isRefresh = true;
                    if (oldRvec == null) {
                        oldRvec = new Mat();
                        rvec.copyTo(oldRvec);
                    }
                    if (oldTvec == null) {
                        oldTvec = new Mat();
                        tvec.copyTo(oldTvec);
                    }
                    // filter Rvec noise.
                    using (Mat absDiffRvec = new Mat()) {
                        Core.absdiff(rvec, oldRvec, absDiffRvec);
                        using (Mat cmpRvec = new Mat()) {
                            Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
                            if (Core.countNonZero(cmpRvec) > 0)
                                isRefresh = false;
                        }
                    }
                    // filter Tvec noise.
                    using (Mat absDiffTvec = new Mat()) {
                        Core.absdiff(tvec, oldTvec, absDiffTvec);
                        using (Mat cmpTvec = new Mat()) {
                            Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
                            if (Core.countNonZero(cmpTvec) > 0)
                                isRefresh = false;
                        }
                    }
                }
                if (isRefresh) {
                    if (!rightEye.activeSelf)
                        rightEye.SetActive(true);
                    if (!leftEye.activeSelf)
                        leftEye.SetActive(true);
                    rvec.copyTo(oldRvec);
                    tvec.copyTo(oldTvec);
                    Calib3d.Rodrigues(rvec, rotM);
                    transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0)[0], (float)rotM.get(0, 1)[0], (float)rotM.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                    transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0)[0], (float)rotM.get(1, 1)[0], (float)rotM.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                    transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0)[0], (float)rotM.get(2, 1)[0], (float)rotM.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                    modelViewMtrx = lookAtM * transformationM * invertZM;
                    ARCamera.worldToCameraMatrix = modelViewMtrx;
                }
            }
        }
        Utils.matToTexture2D(rgbaMat, texture, colors);
    }
    if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset();
        if (oldRvec != null) {
            oldRvec.Dispose();
            oldRvec = null;
        }
        if (oldTvec != null) {
            oldTvec.Dispose();
            oldTvec = null;
        }
        ARCamera.ResetWorldToCameraMatrix();
        rightEye.SetActive(false);
        leftEye.SetActive(false);
    }
}

void OnDisable()
{
    webCamTexture.Stop();
}
// Update is called once per frame
void Update()
{
    if (Input.GetMouseButtonDown(0)) {
        Debug.Log(Input.mousePosition);
    }
    if (isCamera) {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();
            //convert image to greyscale
            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
            if (isAutoResetMode || faceTracker.getPoints().Count <= 0) {
                // Debug.Log ("detectFace");
                using (Mat equalizeHistMat = new Mat())
                using (MatOfRect faces = new MatOfRect()) {
                    Imgproc.equalizeHist(grayMat, equalizeHistMat);
                    cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                        // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                        | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());
                    if (faces.rows() > 0) {
                        // Debug.Log ("faces " + faces.dump ());
                        List<OpenCVForUnity.Rect> rectsList = faces.toList();
                        List<Point[]> pointsList = faceTracker.getPoints();
                        if (isAutoResetMode) {
                            //add initial face points from MatOfRect
                            if (pointsList.Count <= 0) {
                                faceTracker.addPoints(faces);
                                // Debug.Log ("reset faces ");
                            } else {
                                // when the face region should be captured
                                if (isShot) {
                                    for (int i = 0; i < rectsList.Count; i++) {
                                        OpenCVForUnity.Rect trackRect = new OpenCVForUnity.Rect(rectsList[i].x + rectsList[i].width / 3, rectsList[i].y + rectsList[i].height / 2, rectsList[i].width / 3, rectsList[i].height / 3);
                                        //It determines whether the nose point is included in trackRect.
                                        if (i < pointsList.Count && !trackRect.contains(pointsList[i][67])) {
                                            rectsList.RemoveAt(i);
                                            pointsList.RemoveAt(i);
                                            // Debug.Log ("remove " + i);
                                        }
                                        double left = 0;
                                        double right = 0;
                                        double up = 0;
                                        double down = 0;
                                        //track face points. If face points <= 0, track() always returns false.
                                        if (faceTracker.track(rgbaMat, faceTrackerParams)) {
                                            // point list that stores the face landmark coordinates
                                            pointsList = faceTracker.getPoints();
                                            // find the top, bottom, left and right reference points
                                            left = findMin(pointsList[0][0].x, pointsList[0][1].x, pointsList[0][2].x, pointsList[0][3].x);
                                            right = findMax(pointsList[0][14].x, pointsList[0][13].x, pointsList[0][12].x, pointsList[0][11].x);
                                            up = findMin(pointsList[0][16].y, pointsList[0][17].y, pointsList[0][22].y, pointsList[0][23].y);
                                            down = findMax(pointsList[0][6].y, pointsList[0][7].y, pointsList[0][8].y, pointsList[0][9].y);
                                            // print the face landmark coordinates
                                            //for (int i = 0; i < pointsList[0].Length; i++)
                                            //{
                                            //    Imgproc.putText(imgMat, i + "", pointsList[0][i], Core.FONT_HERSHEY_SIMPLEX, 0.2, new Scalar(255, 2, 10, 255), 1, Imgproc.LINE_AA, false);
                                            //}
                                            //faceTracker.draw(imgMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                                        }
                                        // set the horizontal and vertical margins
                                        double widthLR = (right - left) / 10;
                                        double heightU = (down - up) / 3;
                                        double heightD = (down - up) / 18;
                                        // find the screen coordinates of the red box's top-left and bottom-right corners
                                        Vector2 screenPos1 = Camera.main.WorldToScreenPoint(new Vector2((float)(left - widthLR), (float)(up - heightU)));
                                        Vector2 screenPos2 = Camera.main.WorldToScreenPoint(new Vector2((float)(right + widthLR), (float)(down + heightD)));
                                        // screen coordinates of the image's top-left and top-right corners
                                        Vector2 leftUp = Camera.main.WorldToScreenPoint(new Vector2(0f, 0f));
                                        Vector2 rightUp = Camera.main.WorldToScreenPoint(new Vector2(texture.width / 2, texture.height / 2));
                                        // sizes of the horizontal and vertical margins
                                        float widthSpace = Screen.width - rightUp.x;
                                        float heightSpace = Screen.height - rightUp.y;
                                        // screen coordinates of the centre
                                        Vector2 mid = Camera.main.WorldToScreenPoint(new Vector2(0f, 0f));
                                        // margins between the image and the red box
                                        float leftSpace = screenPos1.x - leftUp.x;
                                        float upSpace = screenPos1.y - leftUp.y;
                                        // width and height of the red box
                                        float widthImg = screenPos2.x - screenPos1.x;
                                        float heightImg = screenPos2.y - screenPos1.y;
                                        //Debug.Log("width: " + widthImg + ", height: " + heightImg);
                                        // do not draw the rectangle while saving
                                        if (!isTake) {
                                            // draw the rectangle from the reference points and margins
                                            Imgproc.rectangle(rgbaMat, new Point(left - widthLR, up - heightU), new Point(right + widthLR, down + heightD), new Scalar(255, 0, 0, 255), 2);
                                        }
                                        if (isTake) {
                                            // save the image as a square,
                                            // handling the landscape and portrait cases separately
                                            if (widthImg > heightImg) {
                                                int picX = (int)(widthSpace + leftSpace);
                                                int picY = (int)(Screen.height - upSpace - heightImg - (widthImg - heightImg) / 2 - heightSpace);
                                                // the face must be larger than a quarter of the screen and centred
                                                if (widthImg > Screen.width / 4 && picX < mid.x && picX + widthImg > mid.x && picY < mid.y && picY + heightImg > mid.y) {
                                                    StartCoroutine(downImage((int)widthImg, picX, picY));
                                                } else {
                                                    StartCoroutine(reShot());
                                                }
                                            } else {
                                                int picX = (int)(widthSpace + leftSpace - (heightImg - widthImg) / 2);
                                                int picY = (int)(Screen.height - upSpace - heightImg - heightSpace);
                                                if (heightImg > Screen.width / 4 && picX < mid.x && picX + widthImg > mid.x && picY < mid.y && picY + heightImg > mid.y) {
                                                    StartCoroutine(downImage((int)heightImg, picX, picY));
                                                } else {
                                                    StartCoroutine(reShot());
                                                }
                                            }
                                            // show the face with the inner blue rectangle
                                            //Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                                        }
                                    }
                                }
                            }
                        } else {
                            faceTracker.addPoints(faces);
                        }
                    } else {
                        if (isTake) {
                            StartCoroutine(reShot());
                        }
                        if (isAutoResetMode) {
                            faceTracker.reset();
                        }
                    }
                }
            }
            // draw the face region as points
            //track face points. If face points <= 0, track() always returns false.
            //if (faceTracker.track(grayMat, faceTrackerParams))
            //    faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
#if OPENCV_2
            Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
#else
            //Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
#endif
            Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
        }
    }
}
// Update is called once per frame void Update () { if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) { Mat rgbaMat = webCamTextureToMatHelper.GetMat (); //convert image to greyscale Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY); if (faceTracker.getPoints ().Count <= 0) { Debug.Log ("detectFace"); //convert image to greyscale using (Mat equalizeHistMat = new Mat ()) using (MatOfRect faces = new MatOfRect ()) { Imgproc.equalizeHist (grayMat, equalizeHistMat); cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ()); if (faces.rows () > 0) { Debug.Log ("faces " + faces.dump ()); //add initial face points from MatOfRect faceTracker.addPoints (faces); //draw face rect OpenCVForUnity.Rect[] rects = faces.toArray (); for (int i = 0; i < rects.Length; i++) { #if OPENCV_3 Imgproc.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2); #else Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2); #endif } } } } //track face points.if face points <= 0, always return false. if (faceTracker.track (grayMat, faceTrackerParams)) { if (isDrawPoints) faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255)); #if OPENCV_3 Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false); #else Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false); #endif Point[] points = faceTracker.getPoints () [0]; if (points.Length > 0) { // for (int i = 0; i < points.Length; i++) { //#if OPENCV_3 // Imgproc.putText(rgbaMat, "" + i, new Point(points[i].x, points[i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar(0, 0, 255, 255), 2, Core.LINE_AA, false); //#else // Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false); //#endif // } imagePoints.fromArray ( points [31],//l eye points [36],//r eye points [67],//nose points [48],//l mouth points [54] //r mouth // , // points [1],//l ear // points [13]//r ear ); Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec); bool isRefresh = false; if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)rgbaMat.cols () / (float)webCamTextureToMatHelper.requestWidth)) { isRefresh = true; if (oldRvec == null) { oldRvec = new Mat (); rvec.copyTo (oldRvec); } if (oldTvec == null) { oldTvec = new Mat (); tvec.copyTo (oldTvec); } //filter Rvec Noise. using (Mat absDiffRvec = new Mat ()) { Core.absdiff (rvec, oldRvec, absDiffRvec); // Debug.Log ("absDiffRvec " + absDiffRvec.dump()); using (Mat cmpRvec = new Mat ()) { Core.compare (absDiffRvec, new Scalar (rvecNoiseFilterRange), cmpRvec, Core.CMP_GT); if (Core.countNonZero (cmpRvec) > 0) isRefresh = false; } } //filter Tvec Noise. 
using (Mat absDiffTvec = new Mat ()) { Core.absdiff (tvec, oldTvec, absDiffTvec); // Debug.Log ("absDiffRvec " + absDiffRvec.dump()); using (Mat cmpTvec = new Mat ()) { Core.compare (absDiffTvec, new Scalar (tvecNoiseFilterRange), cmpTvec, Core.CMP_GT); if (Core.countNonZero (cmpTvec) > 0) isRefresh = false; } } } if (isRefresh) { if (!rightEye.activeSelf) rightEye.SetActive (true); if (!leftEye.activeSelf) leftEye.SetActive (true); if ((Mathf.Abs ((float)(points [48].x - points [56].x)) < Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.2 && Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.9) || Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.7) { if (!mouth.activeSelf) mouth.SetActive (true); } else { if (mouth.activeSelf) mouth.SetActive (false); } rvec.copyTo (oldRvec); tvec.copyTo (oldTvec); Calib3d.Rodrigues (rvec, rotM); transformationM .SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0])); transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0])); transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0])); transformationM.SetRow (3, new Vector4 (0, 0, 0, 1)); modelViewMtrx = lookAtM * transformationM * invertZM; ARCamera.worldToCameraMatrix = modelViewMtrx; // Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString()); } } } // Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false); Utils.matToTexture2D (rgbaMat, texture, colors); } if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) { faceTracker.reset (); if (oldRvec != null) { oldRvec.Dispose (); oldRvec = null; } if (oldTvec != null) { oldTvec.Dispose (); oldTvec = null; } ARCamera.ResetWorldToCameraMatrix (); rightEye.SetActive (false); leftEye.SetActive (false); mouth.SetActive (false); } }
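The mouth toggle in the pose-estimation snippets above encodes an open-mouth heuristic: the mouth height |p51.y - p57.y| and mouth width |p48.x - p56.x| are compared against the inter-eye distance |p31.x - p36.x| as a scale reference. Extracted with the same thresholds for readability (the helper name is hypothetical and the landmark labels are approximate):

private static bool IsMouthOpen (Point[] points)
{
    float eyeDist = Mathf.Abs ((float)(points [31].x - points [36].x));   // scale reference
    float mouthWidth = Mathf.Abs ((float)(points [48].x - points [56].x));
    float mouthHeight = Mathf.Abs ((float)(points [51].y - points [57].y));
    // Same two-clause condition as in the samples above.
    return (mouthWidth < eyeDist / 2.2f && mouthHeight > eyeDist / 2.9f)
        || mouthHeight > eyeDist / 2.7f;
}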
// Update is called once per frame void Update() { if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) { Mat rgbaMat = webCamTextureToMatHelper.GetMat(); //convert image to greyscale Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY); if (isAutoResetMode || faceTracker.getPoints().Count <= 0) { // Debug.Log ("detectFace"); //convert image to greyscale using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) { Imgproc.equalizeHist(grayMat, equalizeHistMat); cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size()); if (faces.rows() > 0) { // Debug.Log ("faces " + faces.dump ()); List <OpenCVForUnity.Rect> rectsList = faces.toList(); List <Point[]> pointsList = faceTracker.getPoints(); if (isAutoResetMode) { //add initial face points from MatOfRect if (pointsList.Count <= 0) { faceTracker.addPoints(faces); // Debug.Log ("reset faces "); } else { // Disable nose rect display //for (int i = 0; i < rectsList.Count; i++) //{ // OpenCVForUnity.Rect trackRect = new OpenCVForUnity.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3); // //It determines whether nose point has been included in trackRect. // if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67])) // { // rectsList.RemoveAt(i); // pointsList.RemoveAt(i); // // Debug.Log ("remove " + i); // } // Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2); //} } } else { faceTracker.addPoints(faces); } //draw face rect with largest rect int l = 0; for (int i = 0; i < rectsList.Count; i++) { if (rectsList [i].area() > rectsList [l].area()) { l = i; } #if OPENCV_2 Core.rectangle(rgbaMat, new Point(rectsList [l].x, rectsList [l].y), new Point(rectsList [l].x + rectsList [l].width, rectsList [l].y + rectsList [l].height), new Scalar(255, 0, 0, 255), 2); #else Imgproc.rectangle(rgbaMat, new Point(rectsList [l].x, rectsList [l].y), new Point(rectsList [l].x + rectsList [l].width, rectsList [l].y + rectsList [l].height), new Scalar(255, 0, 0, 255), 2); #endif Imgproc.line(rgbaMat, new Point(rectsList[l].x + rectsList[l].width / 2, rectsList[l].y), new Point(rectsList[l].x + rectsList[l].width / 2, rectsList[l].y - rectsList[l].height / 6), new Scalar(255, 0, 0, 255), 2); Imgproc.line(rgbaMat, new Point(rectsList[l].x + rectsList[l].width / 2, rectsList[l].y + rectsList[l].height), new Point(rectsList[l].x + rectsList[l].width / 2, rectsList[l].y + rectsList[l].height * 7 / 6), new Scalar(255, 0, 0, 255), 2); Imgproc.line(rgbaMat, new Point(rectsList[l].x - rectsList[l].width / 6, rectsList[l].y + rectsList[l].height / 2), new Point(rectsList[l].x, rectsList[l].y + rectsList[l].height / 2), new Scalar(255, 0, 0, 255), 2); Imgproc.line(rgbaMat, new Point(rectsList[l].x + rectsList[l].width * 7 / 6, rectsList[l].y + rectsList[l].height / 2), new Point(rectsList[l].x + rectsList[l].width, rectsList[l].y + rectsList[l].height / 2), new Scalar(255, 0, 0, 255), 2); } //grayscale or rgba //Mat croppedImage = new Mat(grayMat, rectsList[l]); int x = rectsList[l].x; int y = rectsList[l].y; int w = rectsList[l].width; int h = rectsList[l].height; Color[] c = texture.GetPixels(x, y, w, h); m2Texture = 
new Texture2D(w, h); m2Texture.SetPixels(c); m2Texture.Apply(); byte[] imageBytes = m2Texture.EncodeToJPG(); Destroy(m2Texture); //Debug //File.WriteAllBytes(Application.dataPath + "/image.jpg", imageBytes); //StartCoroutine(PostRequest("http://localhost:5000/face_detection", imageBytes)); if (isRequestCompleted) { StartCoroutine(PostRequest("http://" + apiserverip + ":5000/face_detection", imageBytes)); } // Display rect on texture } else { if (isAutoResetMode) { faceTracker.reset(); } } } } //track face points.if face points <= 0, always return false. //if (faceTracker.track(grayMat, faceTrackerParams)) // faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255)); //#if OPENCV_2 //Core.putText (rgbaMat, "Contribution " + contribution + " repos " + publicRepos + " followers " + followers + " gists " + publicGists, new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false); //#else //Imgproc.putText(rgbaMat, "Contribution " + contribution + " repos " + publicRepos + " followers " + followers + " gists " + publicGists, new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false); //#endif //Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false); Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors()); } if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) { faceTracker.reset(); } }
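// PostRequest is called above but its body is not shown; a hedged sketch of what such a coroutine
// could look like with UnityWebRequest. The URL and the isRequestCompleted flag come from the
// snippet above; the content-type header and the raw-byte handlers are assumptions.
using System.Collections;
using UnityEngine;
using UnityEngine.Networking;

IEnumerator PostRequest(string url, byte[] imageBytes)
{
    isRequestCompleted = false;
    using (UnityWebRequest request = new UnityWebRequest(url, UnityWebRequest.kHttpVerbPOST))
    {
        request.uploadHandler = new UploadHandlerRaw(imageBytes); // raw JPEG body
        request.downloadHandler = new DownloadHandlerBuffer();
        request.SetRequestHeader("Content-Type", "application/octet-stream");
        yield return request.SendWebRequest();
        if (request.isNetworkError || request.isHttpError)
            Debug.Log(request.error);
        else
            Debug.Log(request.downloadHandler.text); // e.g. JSON returned by /face_detection
    }
    isRequestCompleted = true;
}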
// Update is called once per frame void Update () { if (!initDone) return; if (screenOrientation != Screen.orientation) { screenOrientation = Screen.orientation; updateLayout (); } #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1) if (webCamTexture.width > 16 && webCamTexture.height > 16) { #else if (webCamTexture.didUpdateThisFrame) { #endif Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors); //flip to correct direction. if (webCamDevice.isFrontFacing) { if (webCamTexture.videoRotationAngle == 0) { Core.flip (rgbaMat, rgbaMat, 1); } else if (webCamTexture.videoRotationAngle == 90) { Core.flip (rgbaMat, rgbaMat, 0); } if (webCamTexture.videoRotationAngle == 180) { Core.flip (rgbaMat, rgbaMat, 0); } else if (webCamTexture.videoRotationAngle == 270) { Core.flip (rgbaMat, rgbaMat, 1); } } else { if (webCamTexture.videoRotationAngle == 180) { Core.flip (rgbaMat, rgbaMat, -1); } else if (webCamTexture.videoRotationAngle == 270) { Core.flip (rgbaMat, rgbaMat, -1); } } //convert image to greyscale Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY); if (faceTracker.getPoints ().Count <= 0) { Debug.Log ("detectFace"); //convert image to greyscale using (Mat equalizeHistMat = new Mat ()) using (MatOfRect faces = new MatOfRect ()) { Imgproc.equalizeHist (grayMat, equalizeHistMat); cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ()); if (faces.rows () > 0) { Debug.Log ("faces " + faces.dump ()); //add initial face points from MatOfRect faceTracker.addPoints (faces); //draw face rect OpenCVForUnity.Rect[] rects = faces.toArray (); for (int i = 0; i < rects.Length; i++) { Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2); } } } } //track face points.if face points <= 0, always return false. if (faceTracker.track (grayMat, faceTrackerParams)) faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255)); Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false); Utils.matToTexture2D (rgbaMat, texture, colors); } if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) { faceTracker.reset (); } } void OnDisable () { webCamTexture.Stop (); }
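// The detectMultiScale call repeated in these snippets packs several tuning knobs into one line;
// the same call spelled out, with the parameter meanings (values mirror the call above):
Size minFace = new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15);
cascade.detectMultiScale(
    equalizeHistMat,                // equalized grayscale input
    faces,                          // output rects
    1.1f,                           // scaleFactor: pyramid step between scan scales
    2,                              // minNeighbors: overlapping hits required to keep a face
    Objdetect.CASCADE_SCALE_IMAGE,  // scale the image rather than the detector
    minFace,                        // ignore faces smaller than 15% of the frame width
    new Size());                    // empty maxSize = no upper limit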
// Update is called once per frame void Update() { if (!initDone) { return; } if (screenOrientation != Screen.orientation) { screenOrientation = Screen.orientation; updateLayout(); } #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1) if (webCamTexture.width > 16 && webCamTexture.height > 16) { #else if (webCamTexture.didUpdateThisFrame) { #endif Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors); //flip to correct direction. if (webCamDevice.isFrontFacing) { if (webCamTexture.videoRotationAngle == 0) { Core.flip(rgbaMat, rgbaMat, 1); } else if (webCamTexture.videoRotationAngle == 90) { Core.flip(rgbaMat, rgbaMat, 0); } if (webCamTexture.videoRotationAngle == 180) { Core.flip(rgbaMat, rgbaMat, 0); } else if (webCamTexture.videoRotationAngle == 270) { Core.flip(rgbaMat, rgbaMat, 1); } } else { if (webCamTexture.videoRotationAngle == 180) { Core.flip(rgbaMat, rgbaMat, -1); } else if (webCamTexture.videoRotationAngle == 270) { Core.flip(rgbaMat, rgbaMat, -1); } } //convert image to greyscale Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY); if (faceTracker.getPoints().Count <= 0) { Debug.Log("detectFace"); //convert image to greyscale using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) { Imgproc.equalizeHist(grayMat, equalizeHistMat); cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size()); if (faces.rows() > 0) { Debug.Log("faces " + faces.dump()); //add initial face points from MatOfRect faceTracker.addPoints(faces); //draw face rect OpenCVForUnity.Rect[] rects = faces.toArray(); for (int i = 0; i < rects.Length; i++) { Core.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2); } } } } //track face points. If face points <= 0, this always returns false. if (faceTracker.track(grayMat, faceTrackerParams)) { if (isDrawPoints) { faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255)); } //Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false); Point[] points = faceTracker.getPoints()[0]; if (points.Length > 0) { imagePoints.fromArray( points[31], //l eye points[36], //r eye points[67], //nose points[48], //l mouth points[54] //r mouth ); Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec); bool isRefresh = false; if (tvec.get(2, 0)[0] > 0 && tvec.get(2, 0)[0] < 1200 * ((float)webCamTexture.width / (float)width)) { isRefresh = true; if (oldRvec == null) { oldRvec = new Mat(); rvec.copyTo(oldRvec); } if (oldTvec == null) { oldTvec = new Mat(); tvec.copyTo(oldTvec); } //filter Rvec noise. 
using (Mat absDiffRvec = new Mat()) { Core.absdiff(rvec, oldRvec, absDiffRvec); using (Mat cmpRvec = new Mat()) { Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT); if (Core.countNonZero(cmpRvec) > 0) { isRefresh = false; } } } //filter Tvec noise. using (Mat absDiffTvec = new Mat()) { Core.absdiff(tvec, oldTvec, absDiffTvec); using (Mat cmpTvec = new Mat()) { Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT); if (Core.countNonZero(cmpTvec) > 0) { isRefresh = false; } } } } if (isRefresh) { if (!rightEye.activeSelf) { rightEye.SetActive(true); } if (!leftEye.activeSelf) { leftEye.SetActive(true); } rvec.copyTo(oldRvec); tvec.copyTo(oldTvec); Calib3d.Rodrigues(rvec, rotM); transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0)[0], (float)rotM.get(0, 1)[0], (float)rotM.get(0, 2)[0], (float)tvec.get(0, 0)[0])); transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0)[0], (float)rotM.get(1, 1)[0], (float)rotM.get(1, 2)[0], (float)tvec.get(1, 0)[0])); transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0)[0], (float)rotM.get(2, 1)[0], (float)rotM.get(2, 2)[0], (float)tvec.get(2, 0)[0])); transformationM.SetRow(3, new Vector4(0, 0, 0, 1)); modelViewMtrx = lookAtM * transformationM * invertZM; ARCamera.worldToCameraMatrix = modelViewMtrx; } } } Utils.matToTexture2D(rgbaMat, texture, colors); } if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) { faceTracker.reset(); if (oldRvec != null) { oldRvec.Dispose(); oldRvec = null; } if (oldTvec != null) { oldTvec.Dispose(); oldTvec = null; } ARCamera.ResetWorldToCameraMatrix(); rightEye.SetActive(false); leftEye.SetActive(false); } } void OnDisable() { webCamTexture.Stop(); }
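// The Rvec/Tvec noise filter above can be read as one predicate: reject the new pose if any
// component jumped by more than a threshold since the last accepted pose. A sketch of the same
// test in a helper (the thresholds are whatever rvecNoiseFilterRange/tvecNoiseFilterRange hold):
bool ExceedsRange(Mat current, Mat previous, double range)
{
    using (Mat absDiff = new Mat())
    using (Mat cmp = new Mat())
    {
        Core.absdiff(current, previous, absDiff);                   // per-element |current - previous|
        Core.compare(absDiff, new Scalar(range), cmp, Core.CMP_GT); // flag elements above range
        return Core.countNonZero(cmp) > 0;                          // any flagged element -> reject
    }
}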
// Update is called once per frame void Update() { if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) { Mat rgbaMat = webCamTextureToMatHelper.GetMat(); //convert image to greyscale Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY); if (isAutoResetMode || faceTracker.getPoints().Count <= 0) { // Debug.Log ("detectFace"); //convert image to greyscale using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) { Imgproc.equalizeHist(grayMat, equalizeHistMat); cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size()); if (faces.rows() > 0) { // Debug.Log ("faces " + faces.dump ()); List <OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList(); List <Point[]> pointsList = faceTracker.getPoints(); if (isAutoResetMode) { //add initial face points from MatOfRect if (pointsList.Count <= 0) { faceTracker.addPoints(faces); // Debug.Log ("reset faces "); } else { for (int i = 0; i < rectsList.Count; i++) { OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3); //It determines whether the nose point has been included in trackRect. if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67])) { rectsList.RemoveAt(i); pointsList.RemoveAt(i); i--; //step back: RemoveAt shifted the next element into slot i // Debug.Log ("remove " + i); } //draw the nose-tracking rect Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2); } } } else { faceTracker.addPoints(faces); } //draw face rect for (int i = 0; i < rectsList.Count; i++) { Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2); } } else { if (isAutoResetMode) { faceTracker.reset(); } } } } //track face points. If face points <= 0, this always returns false. 
if (faceTracker.track(grayMat, faceTrackerParams)) { //GameObject.FindGameObjectWithTag("left hand").transform.localScale = new Vector3(0.05f, 0.05f, 50); //GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0.05f, 0.05f, 50); //facecount = 0; if (facerec > 15) { GameObject.FindGameObjectWithTag("left hand").transform.localScale = new Vector3(0.2f, 0.2f, 50); GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0.2f, 0.2f, 50); facecount = 0; } else { facerec++; } //uncomment below for rectangle around face //faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255)); } else { //facecount prevents flickering of the hand from poor face recognition if (facecount > 15) { facerec = 0; GameObject.FindGameObjectWithTag("left hand").transform.localScale = new Vector3(0f, 0f, 0); GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0f, 0f, 0); facecount++; } else { facecount++; } } //Imgproc.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false); // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false); Utils.fastMatToTexture2D(rgbaMat, texture); } //face tracker resets upon screen click and space bar if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) { faceTracker.reset(); } if (Input.GetKeyDown(KeyCode.Escape)) { if (SpeechRecognizer.IsRecording()) { SpeechRecognizer.StopIfRecording(); //resultText.text = "I stopped recording"; } Application.Quit(); //Application.LoadLevel ("MainActivity.class"); } }
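// The facerec/facecount counters above are a hand-rolled hysteresis: the hand models are only
// shown after ~15 consecutive tracked frames and only hidden after ~15 consecutive lost frames,
// so a single bad detection does not make them flicker. The same idea as a small reusable class:
public class FrameHysteresis
{
    readonly int threshold;
    int agreeCount;
    bool state;

    public FrameHysteresis(int threshold) { this.threshold = threshold; }

    // Feed one raw observation per frame; returns the debounced state.
    public bool Step(bool observed)
    {
        if (observed == state) { agreeCount = 0; }
        else if (++agreeCount > threshold) { state = observed; agreeCount = 0; }
        return state;
    }
}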
/// <summary> /// Raises the webcam texture to mat helper error occurred event. /// </summary> /// <param name="errorCode">Error code.</param> // public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode) // { // Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode); // } // Update is called once per frame void Update() { if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) { Mat rgbaMat = webCamTextureToMatHelper.GetMat(); //convert image to greyscale Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY); if (isAutoResetMode || faceTracker.getPoints().Count <= 0) { // Debug.Log ("detectFace"); //convert image to greyscale using (var equalizeHistMat = new Mat()) using (var faces = new MatOfRect()) { Imgproc.equalizeHist(grayMat, equalizeHistMat); cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size() ); if (faces.rows() > 0) { // Debug.Log ("faces " + faces.dump ()); List <OpenCVForUnity.Rect> rectsList = faces.toList(); List <Point[]> pointsList = faceTracker.getPoints(); if (isAutoResetMode) { //add initial face points from MatOfRect if (pointsList.Count <= 0) { faceTracker.addPoints(faces); // Debug.Log ("reset faces "); } else { for (int i = 0; i < rectsList.Count; i++) { var trackRect = new OpenCVForUnity.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3); //It determines whether the nose point has been included in trackRect. if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67])) { rectsList.RemoveAt(i); pointsList.RemoveAt(i); i--; //step back: RemoveAt shifted the next element into slot i // Debug.Log ("remove " + i); } Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2); } } } else { faceTracker.addPoints(faces); } // update the list of target meshes { while (targetMeshList.Count < rectsList.Count) { var obj = Instantiate(targetMeshPrefab).GetComponent <MeshRenderer>(); obj.transform.rotation = Quaternion.Euler(-90, 0, 0); targetMeshList.Add(obj); } for (int i = targetMeshList.Count - 1; i >= 0; --i) { if (i >= rectsList.Count) { targetMeshList[i].material.color = Color.clear; } else { targetMeshList[i].material.color = Color.red; } } } //draw face rect for (int i = 0; i < rectsList.Count; i++) { #if OPENCV_2 Core.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2); #else Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2); #endif if (i < targetMeshList.Count) { var rectPos = new Vector2(rectsList[i].x, rectsList[i].y); // targetMeshList[i].transform.position = new Vector3( rectsList[i].x, rectsList[i].y, targetMeshPosZ ); var ray = RectTransformUtility.ScreenPointToRay(Camera.main, rectPos); RaycastHit hit; if (Physics.Raycast(ray, out hit)) { targetMeshList[i].transform.position = hit.point + hit.normal * 0.5f; } targetImage.transform.localPosition = new Vector2(rectsList[i].x, rectsList[i].y); } } } else { if (isAutoResetMode) { faceTracker.reset(); } } } } //track face points. If face points <= 0, this always returns false. 
if (faceTracker.track(grayMat, faceTrackerParams)) { faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255)); } #if OPENCV_2 Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false); #else Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false); #endif // Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false); Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors()); } if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) { faceTracker.reset(); } }
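// Placing a mesh where a screen-space face rect hits scene geometry, as the target-mesh block
// above does per detection; isolated into a helper (the 0.5f surface offset matches the snippet):
void PlaceMarker(Transform marker, Vector2 screenPos)
{
    Ray ray = RectTransformUtility.ScreenPointToRay(Camera.main, screenPos);
    RaycastHit hit;
    if (Physics.Raycast(ray, out hit))
    {
        marker.position = hit.point + hit.normal * 0.5f; // lift the mesh slightly off the surface
    }
}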
private IEnumerator init() { if (webCamTexture != null) { faceTracker.reset(); webCamTexture.Stop(); initDone = false; rgbaMat.Dispose(); grayMat.Dispose(); } // Checks how many and which cameras are available on the device for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) { if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) { Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing); webCamDevice = WebCamTexture.devices [cameraIndex]; webCamTexture = new WebCamTexture(webCamDevice.name, width, height); break; } } if (webCamTexture == null) { webCamDevice = WebCamTexture.devices [0]; webCamTexture = new WebCamTexture(webCamDevice.name, width, height); } Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS); // Starts the camera webCamTexture.Play(); while (true) { //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/) #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1) if (webCamTexture.width > 16 && webCamTexture.height > 16) { #else if (webCamTexture.didUpdateThisFrame) { #endif Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS); Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing); colors = new Color32[webCamTexture.width * webCamTexture.height]; rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4); grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1); texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false); cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml")); if (cascade.empty()) { Debug.LogError("cascade file is not loaded. Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. "); } gameObject.transform.localEulerAngles = new Vector3(0, 0, 0); // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back); gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1); // bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored; // float scaleX = 1; // float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f; // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1); gameObject.GetComponent <Renderer> ().material.mainTexture = texture; Camera.main.orthographicSize = webCamTexture.height / 2; initDone = true; break; } else { yield return(0); } } } // Update is called once per frame void Update() { if (!initDone) { return; } #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1) if (webCamTexture.width > 16 && webCamTexture.height > 16) { #else if (webCamTexture.didUpdateThisFrame) { #endif Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors); //flip to correct direction. 
if (webCamTexture.videoVerticallyMirrored) { if (webCamDevice.isFrontFacing) { if (webCamTexture.videoRotationAngle == 0) { Core.flip(rgbaMat, rgbaMat, -1); } else if (webCamTexture.videoRotationAngle == 180) { Core.flip(rgbaMat, rgbaMat, 0); } } else { if (webCamTexture.videoRotationAngle == 0) { } else if (webCamTexture.videoRotationAngle == 180) { Core.flip(rgbaMat, rgbaMat, 1); } } } else { if (webCamDevice.isFrontFacing) { if (webCamTexture.videoRotationAngle == 0) { Core.flip(rgbaMat, rgbaMat, 1); } else if (webCamTexture.videoRotationAngle == 180) { Core.flip(rgbaMat, rgbaMat, 0); } } else { if (webCamTexture.videoRotationAngle == 0) { } else if (webCamTexture.videoRotationAngle == 180) { Core.flip(rgbaMat, rgbaMat, -1); } } } //convert image to greyscale Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY); if (faceTracker.getPoints().Count <= 0) { Debug.Log("detectFace"); //convert image to greyscale using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) { Imgproc.equalizeHist(grayMat, equalizeHistMat); cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size()); if (faces.rows() > 0) { Debug.Log("faces " + faces.dump()); //add initial face points from MatOfRect faceTracker.addPoints(faces); //draw face rect OpenCVForUnity.Rect[] rects = faces.toArray(); for (int i = 0; i < rects.Length; i++) { Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2); } } } } //track face points.if face points <= 0, always return false. if (faceTracker.track(grayMat, faceTrackerParams)) { faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255)); } Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false); Utils.matToTexture2D(rgbaMat, texture, colors); } if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0) { faceTracker.reset(); } } void OnDisable() { webCamTexture.Stop(); } void OnGUI() { float screenScale = Screen.height / 240.0f; Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale)); GUI.matrix = scaledMatrix; GUILayout.BeginVertical(); if (GUILayout.Button("back")) { Application.LoadLevel("FaceTrackerSample"); } if (GUILayout.Button("change camera")) { isFrontFacing = !isFrontFacing; StartCoroutine(init()); } GUILayout.EndVertical(); } } }
// Update is called once per frame void Update () { if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) { Mat rgbaMat = webCamTextureToMatHelper.GetMat (); //convert image to greyscale Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY); if (faceTracker.getPoints ().Count <= 0) { Debug.Log ("detectFace"); //convert image to greyscale using (Mat equalizeHistMat = new Mat ()) using (MatOfRect faces = new MatOfRect ()) { Imgproc.equalizeHist (grayMat, equalizeHistMat); cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ()); if (faces.rows () > 0) { Debug.Log ("faces " + faces.dump ()); //add initial face points from MatOfRect faceTracker.addPoints (faces); //draw face rect OpenCVForUnity.Rect[] rects = faces.toArray (); for (int i = 0; i < rects.Length; i++) { #if OPENCV_3 Imgproc.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2); #else Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2); #endif } } } } //track face points.if face points <= 0, always return false. if (faceTracker.track (grayMat, faceTrackerParams)) faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255)); #if OPENCV_3 Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false); #else Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false); #endif // Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false); Utils.matToTexture2D (rgbaMat, texture, colors); } if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) { faceTracker.reset (); } }
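// The #if OPENCV_3 / #else pairs repeated above exist because the drawing and text APIs moved
// from Core to Imgproc in OpenCV 3. A small shim keeps call sites clean (a sketch, not part of
// the original samples; putText could be wrapped the same way):
static void DrawRect(Mat img, OpenCVForUnity.Rect r, Scalar color, int thickness)
{
#if OPENCV_3
    Imgproc.rectangle(img, new Point(r.x, r.y), new Point(r.x + r.width, r.y + r.height), color, thickness);
#else
    Core.rectangle(img, new Point(r.x, r.y), new Point(r.x + r.width, r.y + r.height), color, thickness);
#endif
}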
private void Run() { gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1); Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation); float width = 0; float height = 0; width = gameObject.transform.localScale.x; height = gameObject.transform.localScale.y; float widthScale = (float)Screen.width / width; float heightScale = (float)Screen.height / height; if (widthScale < heightScale) { Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2; } else { Camera.main.orthographicSize = height / 2; } //initialize FaceTracker FaceTracker faceTracker = new FaceTracker(tracker_model_json_filepath); //initialize FaceTrackerParams FaceTrackerParams faceTrackerParams = new FaceTrackerParams(); Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4); Utils.texture2DToMat(imgTexture, imgMat); Debug.Log("imgMat dst ToString " + imgMat.ToString()); CascadeClassifier cascade = new CascadeClassifier(); cascade.load(haarcascade_frontalface_alt_xml_filepath); //if (cascade.empty()) //{ // Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. "); //} //convert image to greyscale Mat gray = new Mat(); Imgproc.cvtColor(imgMat, gray, Imgproc.COLOR_RGBA2GRAY); MatOfRect faces = new MatOfRect(); Imgproc.equalizeHist(gray, gray); cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size()); Debug.Log("faces " + faces.dump()); if (faces.rows() > 0) { //add initial face points from MatOfRect faceTracker.addPoints(faces); } //track face points.if face points <= 0, always return false. if (faceTracker.track(imgMat, faceTrackerParams)) { faceTracker.draw(imgMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255)); } Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false); Utils.matToTexture2D(imgMat, texture); gameObject.GetComponent <Renderer>().material.mainTexture = texture; cascade.Dispose(); }
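// Run() above fits the orthographic camera to the image by comparing width and height scale
// factors; the rule in isolation (imgTexture dimensions stand in for any texture):
float widthScale = (float)Screen.width / imgTexture.width;
float heightScale = (float)Screen.height / imgTexture.height;
if (widthScale < heightScale)
{
    // width is the limiting axis: derive the half-height that preserves aspect ratio
    Camera.main.orthographicSize = (imgTexture.width * (float)Screen.height / (float)Screen.width) / 2;
}
else
{
    Camera.main.orthographicSize = imgTexture.height / 2;
}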
private IEnumerator init() { axes.SetActive (false); head.SetActive (false); rightEye.SetActive (false); leftEye.SetActive (false); mouth.SetActive (false); if (webCamTexture != null) { faceTracker.reset (); webCamTexture.Stop (); initDone = false; rgbaMat.Dispose (); grayMat.Dispose (); cascade.Dispose (); camMatrix.Dispose (); distCoeffs.Dispose (); } // Checks how many and which cameras are available on the device for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) { if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) { Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing); webCamDevice = WebCamTexture.devices [cameraIndex]; webCamTexture = new WebCamTexture (webCamDevice.name, width, height); break; } } if (webCamTexture == null) { webCamDevice = WebCamTexture.devices [0]; webCamTexture = new WebCamTexture (webCamDevice.name, width, height); } Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS); // Starts the camera webCamTexture.Play (); while (true) { //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/) #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1) if (webCamTexture.width > 16 && webCamTexture.height > 16) { #else if (webCamTexture.didUpdateThisFrame) { #endif Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS); Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing); colors = new Color32[webCamTexture.width * webCamTexture.height]; rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4); grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1); texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false); cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml")); if (cascade.empty ()) { Debug.LogError ("cascade file is not loaded. Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. "); } gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1); gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0); // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back); // bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored; // float scaleX = 1; // float scaleY = _videoVerticallyMirrored ? 
-1.0f : 1.0f; // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1); gameObject.GetComponent<Renderer> ().material.mainTexture = texture; Camera.main.orthographicSize = webCamTexture.height / 2; int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ()); camMatrix = new Mat (3, 3, CvType.CV_64FC1); camMatrix.put (0, 0, max_d); camMatrix.put (0, 1, 0); camMatrix.put (0, 2, rgbaMat.cols () / 2.0f); camMatrix.put (1, 0, 0); camMatrix.put (1, 1, max_d); camMatrix.put (1, 2, rgbaMat.rows () / 2.0f); camMatrix.put (2, 0, 0); camMatrix.put (2, 1, 0); camMatrix.put (2, 2, 1.0f); Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ()); double apertureWidth = 0; double apertureHeight = 0; double[] fovx = new double[1]; double[] fovy = new double[1]; double[] focalLength = new double[1]; Point principalPoint = new Point (); double[] aspectratio = new double[1]; Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio); Debug.Log ("imageSize " + imageSize.ToString ()); Debug.Log ("apertureWidth " + apertureWidth); Debug.Log ("apertureHeight " + apertureHeight); Debug.Log ("fovx " + fovx [0]); Debug.Log ("fovy " + fovy [0]); Debug.Log ("focalLength " + focalLength [0]); Debug.Log ("principalPoint " + principalPoint.ToString ()); Debug.Log ("aspectratio " + aspectratio [0]); ARCamera.fieldOfView = (float)fovy [0]; Debug.Log ("camMatrix " + camMatrix.dump ()); distCoeffs = new MatOfDouble (0, 0, 0, 0); Debug.Log ("distCoeffs " + distCoeffs.dump ()); lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0)); Debug.Log ("lookAt " + lookAtM.ToString ()); invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1)); initDone = true; break; } else { yield return 0; } } } // Update is called once per frame void Update () { if (!initDone) return; #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1) if (webCamTexture.width > 16 && webCamTexture.height > 16) { #else if (webCamTexture.didUpdateThisFrame) { #endif Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors); //flip to correct direction. 
if (webCamTexture.videoVerticallyMirrored) { if (webCamDevice.isFrontFacing) { if (webCamTexture.videoRotationAngle == 0) { Core.flip (rgbaMat, rgbaMat, -1); } else if (webCamTexture.videoRotationAngle == 180) { Core.flip (rgbaMat, rgbaMat, 0); } } else { if (webCamTexture.videoRotationAngle == 0) { } else if (webCamTexture.videoRotationAngle == 180) { Core.flip (rgbaMat, rgbaMat, 1); } } } else { if (webCamDevice.isFrontFacing) { if (webCamTexture.videoRotationAngle == 0) { Core.flip (rgbaMat, rgbaMat, 1); } else if (webCamTexture.videoRotationAngle == 180) { Core.flip (rgbaMat, rgbaMat, 0); } } else { if (webCamTexture.videoRotationAngle == 0) { } else if (webCamTexture.videoRotationAngle == 180) { Core.flip (rgbaMat, rgbaMat, -1); } } } //convert image to greyscale Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY); if (faceTracker.getPoints ().Count <= 0) { Debug.Log ("detectFace"); //convert image to greyscale using (Mat equalizeHistMat = new Mat ()) using (MatOfRect faces = new MatOfRect ()) { Imgproc.equalizeHist (grayMat, equalizeHistMat); cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ()); if (faces.rows () > 0) { Debug.Log ("faces " + faces.dump ()); //add initial face points from MatOfRect faceTracker.addPoints (faces); //draw face rect OpenCVForUnity.Rect[] rects = faces.toArray (); for (int i = 0; i < rects.Length; i++) { Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2); } } } } //track face points.if face points <= 0, always return false. if (faceTracker.track (grayMat, faceTrackerParams)) { if (isDrawPoints) faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255)); Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false); Point[] points = faceTracker.getPoints () [0]; if (points.Length > 0) { // for (int i = 0; i < points.Length; i++) { // Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false); // } imagePoints.fromArray ( points [31],//l eye points [36],//r eye points [67],//nose points [48],//l mouth points [54] //r mouth // , // points [1],//l ear // points [13]//r ear ); Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec); bool isRefresh = false; if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width)) { isRefresh = true; if (oldRvec == null) { oldRvec = new Mat (); rvec.copyTo (oldRvec); } if (oldTvec == null) { oldTvec = new Mat (); tvec.copyTo (oldTvec); } //filter Rvec Noise. using (Mat absDiffRvec = new Mat ()) { Core.absdiff (rvec, oldRvec, absDiffRvec); // Debug.Log ("absDiffRvec " + absDiffRvec.dump()); using (Mat cmpRvec = new Mat ()) { Core.compare (absDiffRvec, new Scalar (rvecNoiseFilterRange), cmpRvec, Core.CMP_GT); if (Core.countNonZero (cmpRvec) > 0) isRefresh = false; } } //filter Tvec Noise. 
using (Mat absDiffTvec = new Mat ()) { Core.absdiff (tvec, oldTvec, absDiffTvec); // Debug.Log ("absDiffRvec " + absDiffRvec.dump()); using (Mat cmpTvec = new Mat ()) { Core.compare (absDiffTvec, new Scalar (tvecNoiseFilterRange), cmpTvec, Core.CMP_GT); if (Core.countNonZero (cmpTvec) > 0) isRefresh = false; } } } if (isRefresh) { if (!rightEye.activeSelf) rightEye.SetActive (true); if (!leftEye.activeSelf) leftEye.SetActive (true); if ((Mathf.Abs ((float)(points [48].x - points [56].x)) < Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.2 && Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.9) || Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.7) { if (!mouth.activeSelf) mouth.SetActive (true); } else { if (mouth.activeSelf) mouth.SetActive (false); } rvec.copyTo (oldRvec); tvec.copyTo (oldTvec); Calib3d.Rodrigues (rvec, rotM); transformationM .SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0])); transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0])); transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0])); transformationM.SetRow (3, new Vector4 (0, 0, 0, 1)); modelViewMtrx = lookAtM * transformationM * invertZM; ARCamera.worldToCameraMatrix = modelViewMtrx; // Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString()); } } } Utils.matToTexture2D (rgbaMat, texture, colors); } if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) { faceTracker.reset (); if (oldRvec != null) { oldRvec.Dispose (); oldRvec = null; } if (oldTvec != null) { oldTvec.Dispose (); oldTvec = null; } ARCamera.ResetWorldToCameraMatrix (); rightEye.SetActive (false); leftEye.SetActive (false); mouth.SetActive (false); } } void OnDisable () { webCamTexture.Stop (); } private Matrix4x4 getLookAtMatrix (Vector3 pos, Vector3 target, Vector3 up) { Vector3 z = Vector3.Normalize (pos - target); Vector3 x = Vector3.Normalize (Vector3.Cross (up, z)); Vector3 y = Vector3.Normalize (Vector3.Cross (z, x)); Matrix4x4 result = new Matrix4x4 (); result.SetRow (0, new Vector4 (x.x, x.y, x.z, -(Vector3.Dot (pos, x)))); result.SetRow (1, new Vector4 (y.x, y.y, y.z, -(Vector3.Dot (pos, y)))); result.SetRow (2, new Vector4 (z.x, z.y, z.z, -(Vector3.Dot (pos, z)))); result.SetRow (3, new Vector4 (0, 0, 0, 1)); return result; } void OnGUI () { float screenScale = Screen.height / 240.0f; Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale)); GUI.matrix = scaledMatrix; GUILayout.BeginVertical (); if (GUILayout.Button ("back")) { Application.LoadLevel ("FaceTrackerSample"); } if (GUILayout.Button ("change camera")) { isFrontFacing = !isFrontFacing; StartCoroutine (init ()); } if (GUILayout.Button ("drawPoints")) { if (isDrawPoints) { isDrawPoints = false; } else { isDrawPoints = true; } } if (GUILayout.Button ("axes")) { if (axes.activeSelf) { axes.SetActive (false); } else { axes.SetActive (true); } } if (GUILayout.Button ("head")) { if (head.activeSelf) { head.SetActive (false); } else { head.SetActive (true); } } GUILayout.EndVertical (); } }
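// The AR snippets compose the final view matrix as lookAtM * transformationM * invertZM. A
// plausible reading (an interpretation, not stated in the samples): invertZM flips Z because
// OpenCV's camera frame is right-handed while Unity's is left-handed, and lookAtM, built with
// up = (0, -1, 0), accounts for the image's downward Y axis. The composition on its own:
Matrix4x4 invertZ = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
Matrix4x4 lookAt = getLookAtMatrix(Vector3.zero, new Vector3(0, 0, 1), new Vector3(0, -1, 0));
Matrix4x4 modelView = lookAt * transformationM * invertZ; // transformationM from solvePnP
ARCamera.worldToCameraMatrix = modelView;                 // drive the AR camera, not the object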