private void Run ()
{
    meshOverlay = this.GetComponent<TrackedMeshOverlay> ();
    shader_FadeID = Shader.PropertyToID ("_Fade");

    rectangleTracker = new RectangleTracker ();
    faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
    frontalFaceParam = new FrontalFaceParam ();

    webCamTextureToMatHelper.Init ();

    isShowingFaceRectsToggle.isOn = isShowingFaceRects;
    useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
    isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
    isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
}
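A small detail worth noting in the initialization above: Shader.PropertyToID("_Fade") converts the property name to an integer ID once, so later per-face fade updates do not repeat the string lookup. A hypothetical one-liner using that cached ID (the isHidden flag and tm mesh are illustrative, not part of the sample):

    // Illustrative use of the cached property ID; avoids re-hashing the "_Fade" string on every call.
    tm.Material.SetFloat (shader_FadeID, isHidden ? 1f : 0.3f);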
private void Run ()
{
    meshOverlay = this.GetComponent<TrackedMeshOverlay> ();

    isShowingFaceRectsToggle.isOn = isShowingFaceRects;
    useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
    isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
    isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;

    if (imgTexture == null) {
        imgTexture = Resources.Load ("family") as Texture2D;
    }

    gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
    Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    meshOverlay.UpdateOverlayTransform ();
    meshOverlay.Reset ();

    float width = gameObject.transform.localScale.x;
    float height = gameObject.transform.localScale.y;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    Mat rgbaMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    OpenCVForUnity.Utils.texture2DToMat (imgTexture, rgbaMat);
    Debug.Log ("rgbaMat ToString " + rgbaMat.ToString ());

    if (faceLandmarkDetector == null) {
        faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
    }

    FrontalFaceParam frontalFaceParam = new FrontalFaceParam ();

    // face detection.
    List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
    if (useDlibFaceDetecter) {
        OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
        List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();

        foreach (var unityRect in result) {
            detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
        }
    } else {
        if (cascade == null) {
            cascade = new CascadeClassifier (haarcascade_frontalface_alt_xml_filepath);
        }
        if (cascade.empty ()) {
            Debug.LogError ("cascade file is not loaded. Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
        }

        // convert the image to grayscale.
        Mat gray = new Mat ();
        Imgproc.cvtColor (rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);

        // detect faces.
        MatOfRect faces = new MatOfRect ();
        Imgproc.equalizeHist (gray, gray);
        cascade.detectMultiScale (gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (gray.cols () * 0.05, gray.cols () * 0.05), new Size ());
        //Debug.Log ("faces " + faces.dump ());

        detectResult = faces.toList ();

        // adjust to Dlib's result.
        foreach (OpenCVForUnity.Rect r in detectResult) {
            r.y += (int)(r.height * 0.1f);
        }

        gray.Dispose ();
    }

    // detect face landmarks.
    OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
    List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
    foreach (var openCVRect in detectResult) {
        UnityEngine.Rect rect = new UnityEngine.Rect (openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
        Debug.Log ("face : " + rect);
        //OpenCVForUnityUtils.DrawFaceRect (imgMat, rect, new Scalar (255, 0, 0, 255), 2);

        List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
        //OpenCVForUnityUtils.DrawFaceLandmark (imgMat, points, new Scalar (0, 255, 0, 255), 2);
        landmarkPoints.Add (points);
    }

    // mask faces.
    int[] face_nums = new int[landmarkPoints.Count];
    for (int i = 0; i < face_nums.Length; i++) {
        face_nums [i] = i;
    }
    // shuffle the face order so the mask source face is picked at random.
    face_nums = face_nums.OrderBy (i => System.Guid.NewGuid ()).ToArray ();

    float offsetX = meshOverlay.Width / 2f;
    float offsetY = meshOverlay.Height / 2f;
    float maskImageWidth = imgTexture.width;
    float maskImageHeight = imgTexture.height;

    TrackedMesh tm;
    for (int i = 0; i < face_nums.Length; i++) {
        meshOverlay.CreateObject (i, imgTexture);
        tm = meshOverlay.GetObjectById (i);

        // position the mesh vertices over the i-th detected face.
        Vector3[] vertices = tm.MeshFilter.mesh.vertices;
        if (vertices.Length == landmarkPoints [face_nums [i]].Count) {
            for (int j = 0; j < vertices.Length; j++) {
                vertices [j].x = landmarkPoints [face_nums [i]] [j].x - offsetX;
                vertices [j].y = offsetY - landmarkPoints [face_nums [i]] [j].y;
            }
        }

        // sample the texture from the first face in the shuffled order.
        Vector2[] uv = tm.MeshFilter.mesh.uv;
        if (uv.Length == landmarkPoints [face_nums [0]].Count) {
            for (int jj = 0; jj < uv.Length; jj++) {
                uv [jj].x = landmarkPoints [face_nums [0]] [jj].x / maskImageWidth;
                uv [jj].y = (maskImageHeight - landmarkPoints [face_nums [0]] [jj].y) / maskImageHeight;
            }
        }
        meshOverlay.UpdateObject (i, vertices, null, uv);

        // filter non-frontal faces.
        if (isFilteringNonFrontalFaces && frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
            tm.Material.SetFloat ("_Fade", 1f);
        } else {
            tm.Material.SetFloat ("_Fade", 0.3f);
        }
    }

    // draw face rects.
    if (isShowingFaceRects) {
        int ann = face_nums [0];
        UnityEngine.Rect rect_ann = new UnityEngine.Rect (detectResult [ann].x, detectResult [ann].y, detectResult [ann].width, detectResult [ann].height);
        OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect_ann, new Scalar (255, 255, 0, 255), 2);

        int bob = 0;
        for (int i = 1; i < face_nums.Length; i++) {
            bob = face_nums [i];
            UnityEngine.Rect rect_bob = new UnityEngine.Rect (detectResult [bob].x, detectResult [bob].y, detectResult [bob].width, detectResult [bob].height);
            OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect_bob, new Scalar (255, 0, 0, 255), 2);
        }
    }

    // draw face points.
    if (isShowingDebugFacePoints) {
        for (int i = 0; i < landmarkPoints.Count; i++) {
            OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, landmarkPoints [i], new Scalar (0, 255, 0, 255), 2);
        }
    }

    Texture2D texture = new Texture2D (rgbaMat.cols (), rgbaMat.rows (), TextureFormat.RGBA32, false);
    OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture);
    gameObject.transform.GetComponent<Renderer> ().material.mainTexture = texture;

    frontalFaceParam.Dispose ();
    rgbaMat.Dispose ();
}
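Both this still-image variant and the video variant below size the main orthographic camera with the same branch, so the calculation could be factored into a small helper. The sketch below is only illustrative; the FitOrthographicCamera name is not part of the sample, and it assumes the content size is passed in the same units used for the quad's localScale (pixels, in these examples).

    // Illustrative helper (not part of the sample): fit an orthographic camera so that
    // content of the given size fills the screen without cropping.
    static void FitOrthographicCamera (Camera cam, float contentWidth, float contentHeight)
    {
        float widthScale = (float)Screen.width / contentWidth;
        float heightScale = (float)Screen.height / contentHeight;

        if (widthScale < heightScale) {
            // width is the limiting dimension: derive the half-height from the screen aspect ratio.
            cam.orthographicSize = (contentWidth * (float)Screen.height / (float)Screen.width) / 2f;
        } else {
            // height is the limiting dimension: half the content height.
            cam.orthographicSize = contentHeight / 2f;
        }
    }

With such a helper, each Run() method would reduce the sizing branch to a single call like FitOrthographicCamera(Camera.main, width, height).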
private void Run ()
{
    meshOverlay = this.GetComponent<TrackedMeshOverlay> ();
    shader_FadeID = Shader.PropertyToID ("_Fade");

    rectangleTracker = new RectangleTracker ();
    faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
    frontalFaceParam = new FrontalFaceParam ();

    rgbMat = new Mat ();

    capture.open (couple_avi_filepath);
    if (capture.isOpened ()) {
        Debug.Log ("capture.isOpened() true");
    } else {
        Debug.Log ("capture.isOpened() false");
    }

    Debug.Log ("CAP_PROP_FORMAT: " + capture.get (Videoio.CAP_PROP_FORMAT));
    Debug.Log ("CV_CAP_PROP_PREVIEW_FORMAT: " + capture.get (Videoio.CV_CAP_PROP_PREVIEW_FORMAT));
    Debug.Log ("CAP_PROP_POS_MSEC: " + capture.get (Videoio.CAP_PROP_POS_MSEC));
    Debug.Log ("CAP_PROP_POS_FRAMES: " + capture.get (Videoio.CAP_PROP_POS_FRAMES));
    Debug.Log ("CAP_PROP_POS_AVI_RATIO: " + capture.get (Videoio.CAP_PROP_POS_AVI_RATIO));
    Debug.Log ("CAP_PROP_FRAME_COUNT: " + capture.get (Videoio.CAP_PROP_FRAME_COUNT));
    Debug.Log ("CAP_PROP_FPS: " + capture.get (Videoio.CAP_PROP_FPS));
    Debug.Log ("CAP_PROP_FRAME_WIDTH: " + capture.get (Videoio.CAP_PROP_FRAME_WIDTH));
    Debug.Log ("CAP_PROP_FRAME_HEIGHT: " + capture.get (Videoio.CAP_PROP_FRAME_HEIGHT));

    // cache the frame size reported by the capture (assumed: frameWidth/frameHeight are fields
    // of this class; the original listing does not show where they are assigned).
    frameWidth = capture.get (Videoio.CAP_PROP_FRAME_WIDTH);
    frameHeight = capture.get (Videoio.CAP_PROP_FRAME_HEIGHT);

    texture = new Texture2D ((int)(frameWidth), (int)(frameHeight), TextureFormat.RGBA32, false);
    gameObject.transform.localScale = new Vector3 ((float)frameWidth, (float)frameHeight, 1);

    float widthScale = (float)Screen.width / (float)frameWidth;
    float heightScale = (float)Screen.height / (float)frameHeight;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = ((float)frameWidth * (float)Screen.height / (float)Screen.width) / 2;
    } else {
        Camera.main.orthographicSize = (float)frameHeight / 2;
    }

    gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

    meshOverlay.UpdateOverlayTransform ();

    grayMat = new Mat ((int)frameHeight, (int)frameWidth, CvType.CV_8UC1);
    cascade = new CascadeClassifier (haarcascade_frontalface_alt_xml_filepath);
    if (cascade.empty ()) {
        Debug.LogError ("cascade file is not loaded. Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
    }

    isShowingFaceRectsToggle.isOn = isShowingFaceRects;
    useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
    isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
    isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;

    OnChangeFaceMaskButton ();
}
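This Run() only opens the video, prepares the texture and Mats, and applies the UI state; frame consumption happens elsewhere, typically in an Update() loop. A minimal sketch of such a loop, assuming the same capture, rgbMat, and texture fields and leaving out the detection and masking steps, might look like this:

    // Minimal sketch (assumed, not the sample's actual Update): read one frame per Unity
    // frame, rewind when the clip ends, and push the pixels to the display texture.
    void Update ()
    {
        if (capture == null || !capture.isOpened ())
            return;

        if (!capture.read (rgbMat)) {
            // end of the clip: rewind and try again on the next frame.
            capture.set (Videoio.CAP_PROP_POS_FRAMES, 0);
            return;
        }

        // VideoCapture delivers BGR; convert before uploading to the RGBA32 texture.
        Imgproc.cvtColor (rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
        OpenCVForUnity.Utils.matToTexture2D (rgbMat, texture);
    }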