// Update is called once per frame
void Update()
{
    // loop play.
    if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT)) {
        capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
    }

    if (capture.grab()) {
        capture.retrieve(rgbMat, 0);
        Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
        //Debug.Log ("Mat toString " + rgbMat.ToString ());

        // detect faces.
        List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect>();
        if (useDlibFaceDetecter) {
            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
            List<UnityEngine.Rect> result = faceLandmarkDetector.Detect();

            foreach (var unityRect in result) {
                detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
            }
        } else {
            // convert image to greyscale.
            Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect()) {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                detectResult = faces.toList();

                // adjust to Dlib's result.
                foreach (OpenCVForUnity.Rect r in detectResult) {
                    r.y += (int)(r.height * 0.1f);
                }
            }
        }

        // face tracking.
        rectangleTracker.UpdateTrackedObjects(detectResult);
        List<TrackedRect> trackedRects = new List<TrackedRect>();
        rectangleTracker.GetObjects(trackedRects, true);

        // create noise filter.
        foreach (var openCVRect in trackedRects) {
            if (openCVRect.state == TrackedState.NEW) {
                if (!lowPassFilterDict.ContainsKey(openCVRect.id)) {
                    lowPassFilterDict.Add(openCVRect.id, new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                }
                if (!opticalFlowFilterDict.ContainsKey(openCVRect.id)) {
                    opticalFlowFilterDict.Add(openCVRect.id, new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                }
            } else if (openCVRect.state == TrackedState.DELETED) {
                if (lowPassFilterDict.ContainsKey(openCVRect.id)) {
                    lowPassFilterDict[openCVRect.id].Dispose();
                    lowPassFilterDict.Remove(openCVRect.id);
                }
                if (opticalFlowFilterDict.ContainsKey(openCVRect.id)) {
                    opticalFlowFilterDict[openCVRect.id].Dispose();
                    opticalFlowFilterDict.Remove(openCVRect.id);
                }
            }
        }

        // create LUT texture.
        foreach (var openCVRect in trackedRects) {
            if (openCVRect.state == TrackedState.NEW) {
                faceMaskColorCorrector.CreateLUTTex(openCVRect.id);
            } else if (openCVRect.state == TrackedState.DELETED) {
                faceMaskColorCorrector.DeleteLUTTex(openCVRect.id);
            }
        }

        // detect face landmark points.
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
        List<List<Vector2>> landmarkPoints = new List<List<Vector2>>();
        for (int i = 0; i < trackedRects.Count; i++) {
            TrackedRect tr = trackedRects[i];
            UnityEngine.Rect rect = new UnityEngine.Rect(tr.x, tr.y, tr.width, tr.height);

            List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

            // apply noise filter.
            if (enableNoiseFilter) {
                if (tr.state > TrackedState.NEW && tr.state < TrackedState.DELETED) {
                    opticalFlowFilterDict[tr.id].Process(rgbMat, points, points);
                    lowPassFilterDict[tr.id].Process(rgbMat, points, points);
                }
            }

            landmarkPoints.Add(points);
        }

        // face masking.
        if (faceMaskTexture != null && landmarkPoints.Count >= 1) {
            // Apply face masking between detected faces and a face mask image.
            float maskImageWidth = faceMaskTexture.width;
            float maskImageHeight = faceMaskTexture.height;

            TrackedRect tr;

            for (int i = 0; i < trackedRects.Count; i++) {
                tr = trackedRects[i];

                if (tr.state == TrackedState.NEW) {
                    meshOverlay.CreateObject(tr.id, faceMaskTexture);
                }
                if (tr.state < TrackedState.DELETED) {
                    MaskFace(meshOverlay, tr, landmarkPoints[i], faceLandmarkPointsInMask, maskImageWidth, maskImageHeight);

                    if (enableColorCorrection) {
                        CorrectFaceMaskColor(tr.id, faceMaskMat, rgbMat, faceLandmarkPointsInMask, landmarkPoints[i]);
                    }
                } else if (tr.state == TrackedState.DELETED) {
                    meshOverlay.DeleteObject(tr.id);
                }
            }
        } else if (landmarkPoints.Count >= 1) {
            // Apply face masking between detected faces.
            float maskImageWidth = texture.width;
            float maskImageHeight = texture.height;

            TrackedRect tr;

            for (int i = 0; i < trackedRects.Count; i++) {
                tr = trackedRects[i];

                if (tr.state == TrackedState.NEW) {
                    meshOverlay.CreateObject(tr.id, texture);
                }
                if (tr.state < TrackedState.DELETED) {
                    MaskFace(meshOverlay, tr, landmarkPoints[i], landmarkPoints[0], maskImageWidth, maskImageHeight);

                    if (enableColorCorrection) {
                        CorrectFaceMaskColor(tr.id, rgbMat, rgbMat, landmarkPoints[0], landmarkPoints[i]);
                    }
                } else if (tr.state == TrackedState.DELETED) {
                    meshOverlay.DeleteObject(tr.id);
                }
            }
        }

        // draw face rects.
        if (displayFaceRects) {
            for (int i = 0; i < detectResult.Count; i++) {
                UnityEngine.Rect rect = new UnityEngine.Rect(detectResult[i].x, detectResult[i].y, detectResult[i].width, detectResult[i].height);
                OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0, 255), 2);
            }

            for (int i = 0; i < trackedRects.Count; i++) {
                UnityEngine.Rect rect = new UnityEngine.Rect(trackedRects[i].x, trackedRects[i].y, trackedRects[i].width, trackedRects[i].height);
                OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 255, 0, 255), 2);
                //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
        }

        // draw face points.
        if (displayDebugFacePoints) {
            for (int i = 0; i < landmarkPoints.Count; i++) {
                OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, landmarkPoints[i], new Scalar(0, 255, 0, 255), 2);
            }
        }

        // display face mask image.
        if (faceMaskTexture != null && faceMaskMat != null) {
            if (displayFaceRects) {
                OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, faceRectInMask, new Scalar(255, 0, 0, 255), 2);
            }
            if (displayDebugFacePoints) {
                OpenCVForUnityUtils.DrawFaceLandmark(faceMaskMat, faceLandmarkPointsInMask, new Scalar(0, 255, 0, 255), 2);
            }

            float scale = (rgbMat.width() / 4f) / faceMaskMat.width();
            float tx = rgbMat.width() - faceMaskMat.width() * scale;
            float ty = 0.0f;
            Mat trans = new Mat(2, 3, CvType.CV_32F); //1.0, 0.0, tx, 0.0, 1.0, ty);
            trans.put(0, 0, scale);
            trans.put(0, 1, 0.0f);
            trans.put(0, 2, tx);
            trans.put(1, 0, 0.0f);
            trans.put(1, 1, scale);
            trans.put(1, 2, ty);

            Imgproc.warpAffine(faceMaskMat, rgbMat, trans, rgbMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));

            if (displayFaceRects || displayDebugFacePoints) {
                OpenCVForUnity.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);
            }
        }

        // Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);

        OpenCVForUnity.Utils.fastMatToTexture2D(rgbMat, texture);
    }
}
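This version of Update() delegates the per-face mesh update to a MaskFace helper that is not shown here; the second Update() below inlines the equivalent logic. As a minimal sketch of what such a helper could look like (the name, signature, and body are illustrative, reconstructed from the inline version further down, not the asset's actual implementation):

// Hypothetical sketch: map the detected face's landmark points to mesh vertices
// and the mask's landmark points to UVs, mirroring the inline code below.
private void MaskFace(TrackedMeshOverlay meshOverlay, TrackedRect tr, List<Vector2> points,
    List<Vector2> maskPoints, float maskImageWidth, float maskImageHeight)
{
    float imageWidth = meshOverlay.width;
    float imageHeight = meshOverlay.height;

    TrackedMesh tm = meshOverlay.GetObjectById(tr.id);

    // Vertices follow the face detected in the current frame (normalized to [-0.5, 0.5]).
    Vector3[] vertices = tm.meshFilter.mesh.vertices;
    if (vertices.Length == points.Count) {
        for (int j = 0; j < vertices.Length; j++) {
            vertices[j].x = points[j].x / imageWidth - 0.5f;
            vertices[j].y = 0.5f - points[j].y / imageHeight;
        }
    }

    // UVs sample the mask image at its own landmark positions (flipped vertically).
    Vector2[] uv = tm.meshFilter.mesh.uv;
    if (uv.Length == maskPoints.Count) {
        for (int jj = 0; jj < uv.Length; jj++) {
            uv[jj].x = maskPoints[jj].x / maskImageWidth;
            uv[jj].y = (maskImageHeight - maskPoints[jj].y) / maskImageHeight;
        }
    }

    meshOverlay.UpdateObject(tr.id, vertices, null, uv);
}

With a helper shaped like this, the two face-masking branches above differ only in which texture and which landmark set act as the mask source.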
// Update is called once per frame
void Update()
{
    // loop play.
    if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT)) {
        capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
    }

    if (capture.grab()) {
        capture.retrieve(rgbMat, 0);
        Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
        //Debug.Log ("Mat toString " + rgbMat.ToString ());

        // detect faces.
        List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect>();
        if (useDlibFaceDetecter) {
            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
            List<UnityEngine.Rect> result = faceLandmarkDetector.Detect();

            foreach (var unityRect in result) {
                detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
            }
        } else {
            // convert image to greyscale.
            Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect()) {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE,
                    new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                detectResult = faces.toList();

                // adjust to Dlib's result.
                foreach (OpenCVForUnity.Rect r in detectResult) {
                    r.y += (int)(r.height * 0.1f);
                }
            }
        }

        // face tracking.
        rectangleTracker.UpdateTrackedObjects(detectResult);
        List<TrackedRect> trackedRects = new List<TrackedRect>();
        rectangleTracker.GetObjects(trackedRects, true);

        // detect face landmark points.
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
        List<List<Vector2>> landmarkPoints = new List<List<Vector2>>();
        for (int i = 0; i < trackedRects.Count; i++) {
            TrackedRect tr = trackedRects[i];
            UnityEngine.Rect rect = new UnityEngine.Rect(tr.x, tr.y, tr.width, tr.height);

            List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
            landmarkPoints.Add(points);
        }

        // face masking.
        if (faceMaskTexture != null && landmarkPoints.Count >= 1) {
            OpenCVForUnity.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);

            float imageWidth = meshOverlay.width;
            float imageHeight = meshOverlay.height;
            float maskImageWidth = faceMaskTexture.width;
            float maskImageHeight = faceMaskTexture.height;

            TrackedRect tr;
            TrackedMesh tm;

            for (int i = 0; i < trackedRects.Count; i++) {
                tr = trackedRects[i];

                if (tr.state == TrackedState.NEW) {
                    meshOverlay.CreateObject(tr.id, faceMaskTexture);
                }
                if (tr.state < TrackedState.DELETED) {
                    tm = meshOverlay.GetObjectById(tr.id);

                    Vector3[] vertices = tm.meshFilter.mesh.vertices;
                    if (vertices.Length == landmarkPoints[i].Count) {
                        for (int j = 0; j < vertices.Length; j++) {
                            vertices[j].x = landmarkPoints[i][j].x / imageWidth - 0.5f;
                            vertices[j].y = 0.5f - landmarkPoints[i][j].y / imageHeight;
                        }
                    }

                    Vector2[] uv = tm.meshFilter.mesh.uv;
                    if (uv.Length == faceLandmarkPointsInMask.Count) {
                        for (int jj = 0; jj < uv.Length; jj++) {
                            uv[jj].x = faceLandmarkPointsInMask[jj].x / maskImageWidth;
                            uv[jj].y = (maskImageHeight - faceLandmarkPointsInMask[jj].y) / maskImageHeight;
                        }
                    }

                    meshOverlay.UpdateObject(tr.id, vertices, null, uv);

                    if (tr.numFramesNotDetected > 3) {
                        tm.material.SetFloat(shader_FadeID, 1f);
                    } else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3) {
                        tm.material.SetFloat(shader_FadeID, 0.3f + (0.7f / 4f) * tr.numFramesNotDetected);
                    } else {
                        tm.material.SetFloat(shader_FadeID, 0.3f);
                    }

                    // filter non-frontal faces.
                    if (filterNonFrontalFaces && frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit) {
                        tm.material.SetFloat(shader_FadeID, 1f);
                    }
                } else if (tr.state == TrackedState.DELETED) {
                    meshOverlay.DeleteObject(tr.id);
                }
            }
        } else if (landmarkPoints.Count >= 1) {
            float imageWidth = meshOverlay.width;
            float imageHeight = meshOverlay.height;
            float maskImageWidth = texture.width;
            float maskImageHeight = texture.height;

            TrackedRect tr;
            TrackedMesh tm;

            for (int i = 0; i < trackedRects.Count; i++) {
                tr = trackedRects[i];

                if (tr.state == TrackedState.NEW) {
                    meshOverlay.CreateObject(tr.id, texture);
                }
                if (tr.state < TrackedState.DELETED) {
                    tm = meshOverlay.GetObjectById(tr.id);

                    Vector3[] vertices = tm.meshFilter.mesh.vertices;
                    if (vertices.Length == landmarkPoints[i].Count) {
                        for (int j = 0; j < vertices.Length; j++) {
                            vertices[j].x = landmarkPoints[i][j].x / imageWidth - 0.5f;
                            vertices[j].y = 0.5f - landmarkPoints[i][j].y / imageHeight;
                        }
                    }

                    Vector2[] uv = tm.meshFilter.mesh.uv;
                    if (uv.Length == landmarkPoints[0].Count) {
                        for (int jj = 0; jj < uv.Length; jj++) {
                            uv[jj].x = landmarkPoints[0][jj].x / maskImageWidth;
                            uv[jj].y = (maskImageHeight - landmarkPoints[0][jj].y) / maskImageHeight;
                        }
                    }

                    meshOverlay.UpdateObject(tr.id, vertices, null, uv);

                    if (tr.numFramesNotDetected > 3) {
                        tm.material.SetFloat(shader_FadeID, 1f);
                    } else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3) {
                        tm.material.SetFloat(shader_FadeID, 0.3f + (0.7f / 4f) * tr.numFramesNotDetected);
                    } else {
                        tm.material.SetFloat(shader_FadeID, 0.3f);
                    }

                    // filter non-frontal faces.
                    if (filterNonFrontalFaces && frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit) {
                        tm.material.SetFloat(shader_FadeID, 1f);
                    }
                } else if (tr.state == TrackedState.DELETED) {
                    meshOverlay.DeleteObject(tr.id);
                }
            }
        }

        // draw face rects.
        if (displayFaceRects) {
            for (int i = 0; i < detectResult.Count; i++) {
                UnityEngine.Rect rect = new UnityEngine.Rect(detectResult[i].x, detectResult[i].y, detectResult[i].width, detectResult[i].height);
                OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0, 255), 2);
            }

            for (int i = 0; i < trackedRects.Count; i++) {
                UnityEngine.Rect rect = new UnityEngine.Rect(trackedRects[i].x, trackedRects[i].y, trackedRects[i].width, trackedRects[i].height);
                OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 255, 0, 255), 2);
                //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
        }

        // draw face points.
        if (displayDebugFacePoints) {
            for (int i = 0; i < landmarkPoints.Count; i++) {
                OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, landmarkPoints[i], new Scalar(0, 255, 0, 255), 2);
            }
        }

        // display face mask image.
        if (faceMaskTexture != null && faceMaskMat != null) {
            if (displayFaceRects) {
                OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, faceRectInMask, new Scalar(255, 0, 0, 255), 2);
            }
            if (displayDebugFacePoints) {
                OpenCVForUnityUtils.DrawFaceLandmark(faceMaskMat, faceLandmarkPointsInMask, new Scalar(0, 255, 0, 255), 2);
            }

            float scale = (rgbMat.width() / 4f) / faceMaskMat.width();
            float tx = rgbMat.width() - faceMaskMat.width() * scale;
            float ty = 0.0f;
            Mat trans = new Mat(2, 3, CvType.CV_32F); //1.0, 0.0, tx, 0.0, 1.0, ty);
            trans.put(0, 0, scale);
            trans.put(0, 1, 0.0f);
            trans.put(0, 2, tx);
            trans.put(1, 0, 0.0f);
            trans.put(1, 1, scale);
            trans.put(1, 2, ty);

            Imgproc.warpAffine(faceMaskMat, rgbMat, trans, rgbMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));
        }

        Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

        OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture);
    }
}
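The 2x3 matrix passed to Imgproc.warpAffine above encodes a uniform scale plus a translation: a mask pixel (x, y) lands at (scale * x + tx, scale * y + ty), which shrinks the mask image to a quarter of the frame width and pins it to the top-right corner (ty = 0). As a sketch, the same matrix could be built once by a small helper (the helper name is illustrative, not part of the asset):

// Illustrative helper: build a 2x3 scale-plus-translate affine matrix
// | s 0 tx |
// | 0 s ty |
private static Mat MakeScaleTranslate(float s, float tx, float ty)
{
    Mat m = new Mat(2, 3, CvType.CV_32F);
    m.put(0, 0, s);  m.put(0, 1, 0f); m.put(0, 2, tx);
    m.put(1, 0, 0f); m.put(1, 1, s);  m.put(1, 2, ty);
    return m;
}

Core.BORDER_TRANSPARENT keeps the rest of rgbMat untouched, so only the region covered by the scaled mask is overwritten.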
private static void DrawFaceLandmark(Mat imgMat, List<Vector2> points, Scalar color, int thickness)
{
    if (points.Count == 73) { // If forehead landmark points exist.
        for (int i = 1; i <= 16; ++i) {
            Imgproc.line(imgMat, new Point(points[i].x, points[i].y), new Point(points[i - 1].x, points[i - 1].y), color, thickness);
        }

        for (int i = 28; i <= 30; ++i) {
            Imgproc.line(imgMat, new Point(points[i].x, points[i].y), new Point(points[i - 1].x, points[i - 1].y), color, thickness);
        }

        for (int i = 18; i <= 21; ++i) {
            Imgproc.line(imgMat, new Point(points[i].x, points[i].y), new Point(points[i - 1].x, points[i - 1].y), color, thickness);
        }
        for (int i = 23; i <= 26; ++i) {
            Imgproc.line(imgMat, new Point(points[i].x, points[i].y), new Point(points[i - 1].x, points[i - 1].y), color, thickness);
        }

        for (int i = 31; i <= 35; ++i) {
            Imgproc.line(imgMat, new Point(points[i].x, points[i].y), new Point(points[i - 1].x, points[i - 1].y), color, thickness);
        }
        Imgproc.line(imgMat, new Point(points[30].x, points[30].y), new Point(points[35].x, points[35].y), color, thickness);

        for (int i = 37; i <= 41; ++i) {
            Imgproc.line(imgMat, new Point(points[i].x, points[i].y), new Point(points[i - 1].x, points[i - 1].y), color, thickness);
        }
        Imgproc.line(imgMat, new Point(points[36].x, points[36].y), new Point(points[41].x, points[41].y), color, thickness);

        for (int i = 43; i <= 47; ++i) {
            Imgproc.line(imgMat, new Point(points[i].x, points[i].y), new Point(points[i - 1].x, points[i - 1].y), color, thickness);
        }
        Imgproc.line(imgMat, new Point(points[42].x, points[42].y), new Point(points[47].x, points[47].y), color, thickness);

        for (int i = 49; i <= 59; ++i) {
            Imgproc.line(imgMat, new Point(points[i].x, points[i].y), new Point(points[i - 1].x, points[i - 1].y), color, thickness);
        }
        Imgproc.line(imgMat, new Point(points[48].x, points[48].y), new Point(points[59].x, points[59].y), color, thickness);

        for (int i = 61; i <= 67; ++i) {
            Imgproc.line(imgMat, new Point(points[i].x, points[i].y), new Point(points[i - 1].x, points[i - 1].y), color, thickness);
        }
        Imgproc.line(imgMat, new Point(points[60].x, points[60].y), new Point(points[67].x, points[67].y), color, thickness);

        // forehead outline (points 68-72); drawn in green here regardless of the color argument.
        for (int i = 69; i <= 72; ++i) {
            Imgproc.line(imgMat, new Point(points[i].x, points[i].y), new Point(points[i - 1].x, points[i - 1].y), new Scalar(0, 255, 0, 255), thickness);
        }
    } else {
        OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, color, thickness);
    }
}
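Each of the index ranges above repeats the same connect-the-dots loop for one facial feature (jawline, brows, nose, eyes, mouth, forehead). A small helper could factor that out; a minimal sketch, with an illustrative name that is not part of the asset:

// Illustrative helper: draw a polyline over points[begin..end], optionally closing the contour.
private static void DrawPolyline(Mat imgMat, List<Vector2> points, int begin, int end, bool close, Scalar color, int thickness)
{
    for (int i = begin + 1; i <= end; ++i) {
        Imgproc.line(imgMat, new Point(points[i].x, points[i].y), new Point(points[i - 1].x, points[i - 1].y), color, thickness);
    }
    if (close) {
        Imgproc.line(imgMat, new Point(points[begin].x, points[begin].y), new Point(points[end].x, points[end].y), color, thickness);
    }
}

With that, the jawline becomes DrawPolyline(imgMat, points, 0, 16, false, color, thickness), an eye becomes DrawPolyline(imgMat, points, 36, 41, true, color, thickness), and so on for the remaining ranges.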
private void Run()
{
    meshOverlay = this.GetComponent<TrackedMeshOverlay>();

    displayFaceRectsToggle.isOn = displayFaceRects;
    useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
    enableColorCorrectionToggle.isOn = enableColorCorrection;
    filterNonFrontalFacesToggle.isOn = filterNonFrontalFaces;
    displayDebugFacePointsToggle.isOn = displayDebugFacePoints;

    if (imgTexture == null) {
        imgTexture = Resources.Load("family") as Texture2D;
    }

    gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    meshOverlay.UpdateOverlayTransform(gameObject.transform);
    meshOverlay.Reset();

    float width = 0;
    float height = 0;
    width = gameObject.transform.localScale.x;
    height = gameObject.transform.localScale.y;

    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

    OpenCVForUnity.Utils.texture2DToMat(imgTexture, rgbaMat);
    Debug.Log("rgbaMat ToString " + rgbaMat.ToString());

    if (faceLandmarkDetector == null) {
        faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
    }

    faceMaskColorCorrector = faceMaskColorCorrector ?? new FaceMaskColorCorrector();
    FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(width, height);

    // detect faces.
    List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect>();
    if (useDlibFaceDetecter) {
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
        List<UnityEngine.Rect> result = faceLandmarkDetector.Detect();

        foreach (var unityRect in result) {
            detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
        }
    } else {
        if (cascade == null) {
            cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
        }
        // if (cascade.empty ()) {
        //     Debug.LogError ("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
        // }

        // convert image to greyscale.
        Mat gray = new Mat();
        Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);

        MatOfRect faces = new MatOfRect();
        Imgproc.equalizeHist(gray, gray);
        cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE,
            new OpenCVForUnity.Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
        //Debug.Log ("faces " + faces.dump ());

        detectResult = faces.toList();

        // adjust to Dlib's result.
        foreach (OpenCVForUnity.Rect r in detectResult) {
            r.y += (int)(r.height * 0.1f);
        }

        gray.Dispose();
    }

    // detect face landmark points.
    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
    List<List<Vector2>> landmarkPoints = new List<List<Vector2>>();
    foreach (var openCVRect in detectResult) {
        UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
        Debug.Log("face : " + rect);
        //OpenCVForUnityUtils.DrawFaceRect(imgMat, rect, new Scalar(255, 0, 0, 255), 2);

        List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
        //OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
        landmarkPoints.Add(points);
    }

    // mask faces.
    int[] face_nums = new int[landmarkPoints.Count];
    for (int i = 0; i < face_nums.Length; i++) {
        face_nums[i] = i;
    }
    face_nums = face_nums.OrderBy(i => System.Guid.NewGuid()).ToArray();

    float imageWidth = meshOverlay.width;
    float imageHeight = meshOverlay.height;
    float maskImageWidth = imgTexture.width;
    float maskImageHeight = imgTexture.height;

    TrackedMesh tm;
    for (int i = 0; i < face_nums.Length; i++) {
        meshOverlay.CreateObject(i, imgTexture);
        tm = meshOverlay.GetObjectById(i);

        Vector3[] vertices = tm.meshFilter.mesh.vertices;
        if (vertices.Length == landmarkPoints[face_nums[i]].Count) {
            for (int j = 0; j < vertices.Length; j++) {
                vertices[j].x = landmarkPoints[face_nums[i]][j].x / imageWidth - 0.5f;
                vertices[j].y = 0.5f - landmarkPoints[face_nums[i]][j].y / imageHeight;
            }
        }

        Vector2[] uv = tm.meshFilter.mesh.uv;
        if (uv.Length == landmarkPoints[face_nums[0]].Count) {
            for (int jj = 0; jj < uv.Length; jj++) {
                uv[jj].x = landmarkPoints[face_nums[0]][jj].x / maskImageWidth;
                uv[jj].y = (maskImageHeight - landmarkPoints[face_nums[0]][jj].y) / maskImageHeight;
            }
        }

        meshOverlay.UpdateObject(i, vertices, null, uv);

        if (enableColorCorrection) {
            faceMaskColorCorrector.CreateLUTTex(i);
            Texture2D LUTTex = faceMaskColorCorrector.UpdateLUTTex(i, rgbaMat, rgbaMat, landmarkPoints[face_nums[0]], landmarkPoints[face_nums[i]]);
            tm.sharedMaterial.SetTexture("_LUTTex", LUTTex);
            tm.sharedMaterial.SetFloat("_ColorCorrection", 1f);
        } else {
            tm.sharedMaterial.SetFloat("_ColorCorrection", 0f);
        }

        // filter non-frontal faces.
        if (filterNonFrontalFaces && frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit) {
            tm.sharedMaterial.SetFloat("_Fade", 1f);
        } else {
            tm.sharedMaterial.SetFloat("_Fade", 0.3f);
        }
    }

    // draw face rects.
    if (displayFaceRects) {
        int ann = face_nums[0];
        UnityEngine.Rect rect_ann = new UnityEngine.Rect(detectResult[ann].x, detectResult[ann].y, detectResult[ann].width, detectResult[ann].height);
        OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_ann, new Scalar(255, 255, 0, 255), 2);

        int bob = 0;
        for (int i = 1; i < face_nums.Length; i++) {
            bob = face_nums[i];
            UnityEngine.Rect rect_bob = new UnityEngine.Rect(detectResult[bob].x, detectResult[bob].y, detectResult[bob].width, detectResult[bob].height);
            OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_bob, new Scalar(255, 0, 0, 255), 2);
        }
    }

    // draw face points.
    if (displayDebugFacePoints) {
        for (int i = 0; i < landmarkPoints.Count; i++) {
            OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, landmarkPoints[i], new Scalar(0, 255, 0, 255), 2);
        }
    }

    Texture2D texture = new Texture2D(rgbaMat.cols(), rgbaMat.rows(), TextureFormat.RGBA32, false);
    OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture);
    gameObject.transform.GetComponent<Renderer>().material.mainTexture = texture;

    frontalFaceChecker.Dispose();
    rgbaMat.Dispose();
}
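The face_nums array is shuffled with OrderBy(i => System.Guid.NewGuid()) (which requires System.Linq), so a randomly chosen detected face becomes the mask source (face_nums[0]) whose landmarks drive the UVs, while every face in the image gets a mesh overlay. If a dependency-free, in-place alternative is preferred, a conventional Fisher-Yates shuffle would do the same job; a minimal sketch (illustrative, not the example's code):

// Illustrative alternative: in-place Fisher-Yates shuffle of the face indices.
private static void Shuffle(int[] a)
{
    for (int i = a.Length - 1; i > 0; i--) {
        int j = UnityEngine.Random.Range(0, i + 1); // random index in 0..i inclusive
        int tmp = a[i];
        a[i] = a[j];
        a[j] = tmp;
    }
}

Calling Shuffle(face_nums) in place of the OrderBy line yields the same random assignment of mask source to target faces.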