/// <summary>
/// One-shot face swap on a still image: detects the leftmost face, pairs its 68
/// landmarks with a hand-authored 68-point set for the UV image, swaps face 0 onto
/// face 1, then displays the composited image (full image in the UI sprite, right
/// half on the mesh's second material).
/// </summary>
void Run()
{
    // Build an RGBA Mat from the source texture; all drawing below targets this Mat.
    Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    OpenCVForUnity.Utils.texture2DToMat(imgTexture, rgbaMat);

    // Frontal-face scorer sized to the whole texture.
    FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(imgTexture.width, imgTexture.height);

    // 1) Face detection: dlib's detector or OpenCV's Haar cascade, toggle-selected.
    List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect>();
    if (useDlibFaceDetecter)
    {
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
        List<UnityEngine.Rect> result = faceLandmarkDetector.Detect();
        foreach (UnityEngine.Rect unityRect in result)
        {
            detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
        }
    }
    else
    {
        // NOTE(review): a fresh CascadeClassifier is stored into the 'cascade' field on
        // every Run without disposing the previous one — consider caching it instead.
        cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
        Mat gray = new Mat();
        Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);
        MatOfRect faces = new MatOfRect();
        Imgproc.equalizeHist(gray, gray);
        cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
        detectResult = faces.toList();

        // Haar rectangles sit slightly higher than dlib's; shift down to match.
        foreach (OpenCVForUnity.Rect r in detectResult)
        {
            r.y += (int)(r.height * 0.1f);
        }
        faces.Dispose(); // was leaked in the original
        gray.Dispose();
    }

    // Sort faces left-to-right so index 0 is always the leftmost face.
    detectResult.Sort((a, b) => a.x.CompareTo(b.x));

    // 2) Landmarks + swap. Guarded: the original indexed detectResult[0]
    // unconditionally and threw when no face was detected.
    List<List<Vector2>> landmarkPoints = new List<List<Vector2>>();
    if (detectResult.Count > 0)
    {
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
        OpenCVForUnity.Rect openCVRect = detectResult[0];
        UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
        List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
        landmarkPoints.Add(points);

        // Hand-authored landmark set for the UV image. Index layout follows dlib's
        // 68-point model: jaw [0-16], brows [17-26], nose [27-35], eyes [36-47],
        // outer mouth [48-59], inner mouth [60-67].
        List<Vector2> uvs = new List<Vector2>()
        {
            // jaw [0-16] (8 is the chin axis)
            new Vector2(583, 210),  //0
            new Vector2(584, 250),  //1
            new Vector2(585, 305),  //2
            new Vector2(600, 360),  //3
            new Vector2(630, 410),  //4
            new Vector2(670, 425),  //5
            new Vector2(710, 440),  //6
            new Vector2(740, 445),  //7
            new Vector2(770, 450),  //8 axis
            new Vector2(800, 445),  //9
            new Vector2(830, 440),  //10
            new Vector2(870, 425),  //11
            new Vector2(910, 410),  //12
            new Vector2(940, 360),  //13
            new Vector2(955, 305),  //14
            new Vector2(956, 250),  //15
            new Vector2(957, 210),  //16
            // left brow [17-21]
            new Vector2(655, 165),  //17
            new Vector2(680, 155),  //18
            new Vector2(710, 160),  //19
            new Vector2(730, 170),  //20
            new Vector2(750, 190),  //21
            // right brow [22-26]
            new Vector2(790, 190),  //22
            new Vector2(810, 170),  //23
            new Vector2(830, 160),  //24
            new Vector2(860, 155),  //25
            new Vector2(885, 165),  //26
            // nose bridge [27-30]
            new Vector2(770, 220),  //27
            new Vector2(770, 250),  //28
            new Vector2(770, 275),  //29
            new Vector2(770, 300),  //30
            // nose base [31-35]
            new Vector2(740, 312),  //31
            new Vector2(755, 316),  //32
            new Vector2(770, 320),  //33
            new Vector2(785, 316),  //34
            new Vector2(800, 312),  //35
            // left eye [36-41]
            new Vector2(670, 215),  //36
            new Vector2(690, 200),  //37
            new Vector2(715, 205),  //38
            new Vector2(730, 225),  //39
            new Vector2(710, 230),  //40
            new Vector2(690, 227),  //41
            // right eye [42-47]
            new Vector2(810, 225),  //42
            new Vector2(825, 205),  //43
            new Vector2(850, 200),  //44
            new Vector2(870, 215),  //45
            new Vector2(855, 227),  //46
            new Vector2(830, 230),  //47
            // outer mouth [48-59]
            new Vector2(720, 360),  //48 left corner
            new Vector2(735, 355),  //49
            new Vector2(750, 350),  //50
            new Vector2(770, 352),  //51 top
            new Vector2(790, 350),  //52
            new Vector2(805, 355),  //53
            new Vector2(820, 360),  //54 right corner
            new Vector2(805, 375),  //55
            new Vector2(790, 382),  //56
            new Vector2(770, 380),  //57 bottom
            new Vector2(750, 382),  //58
            new Vector2(735, 375),  //59
            // inner mouth [60-67]
            new Vector2(730, 365),  //60 left corner
            new Vector2(750, 357),  //61
            new Vector2(770, 354),  //62 top
            new Vector2(790, 357),  //63
            new Vector2(810, 365),  //64 right corner
            new Vector2(790, 370),  //65
            new Vector2(770, 375),  //66 bottom
            new Vector2(750, 370),  //67
        };
        landmarkPoints.Add(uvs);

        // Drop point sets that are not frontal enough (rate threshold in [0, 1]).
        if (filterNonFrontalFaces)
        {
            for (int i = 0; i < landmarkPoints.Count; i++)
            {
                if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
                {
                    // Only index 0 has a matching detectResult entry; the hand-made UV
                    // set has none (the original RemoveAt could throw out-of-range here).
                    if (i < detectResult.Count)
                    {
                        detectResult.RemoveAt(i);
                    }
                    landmarkPoints.RemoveAt(i);
                    i--;
                }
            }
        }

        // Swap only when both point sets survived: copy face 0 onto face 1.
        if (landmarkPoints.Count == 2)
        {
            DlibFaceChanger faceChanger = new DlibFaceChanger();
            faceChanger.isShowingDebugFacePoints = displayDebugFacePoints;
            faceChanger.SetTargetImage(rgbaMat);
            faceChanger.AddFaceChangeData(rgbaMat, landmarkPoints[0], landmarkPoints[1], 0.9f);
            faceChanger.ChangeFace();
            faceChanger.Dispose();
        }
    }
    frontalFaceChecker.Dispose();

    // Show the full composited image in the UI sprite...
    Texture2D t2d = new Texture2D(rgbaMat.width(), rgbaMat.height(), TextureFormat.RGBA32, false);
    OpenCVForUnity.Utils.matToTexture2D(rgbaMat, t2d);
    Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    srcImage.sprite = sp;
    srcImage.preserveAspect = true;

    // ...and put the right half (the UV side) onto the mesh's second material.
    Mat dstMat = new Mat(rgbaMat, new OpenCVForUnity.Rect(rgbaMat.width() / 2, 0, rgbaMat.width() / 2, rgbaMat.height()));
    Texture2D dst_t2d = new Texture2D(dstMat.width(), dstMat.height(), TextureFormat.RGBA32, false);
    OpenCVForUnity.Utils.matToTexture2D(dstMat, dst_t2d);
    mesh.materials[1].mainTexture = dst_t2d;
    dstMat.Dispose(); // submat header; was leaked in the original
    rgbaMat.Dispose();
}
// Update is called once per frame.
// Per frame: detect faces, track them across frames, detect (and optionally
// denoise) landmarks for displayed faces, swap either a mask image or the first
// face onto the others, then blit the result to the output texture.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // detect faces.
        List<Rect> detectResult;
        DetectFaces(rgbaMat, out detectResult, useDlibFaceDetecter);

        // face tracking: stable ids/states across frames.
        List<TrackedRect> trackedRects = new List<TrackedRect>();
        rectangleTracker.UpdateTrackedObjects(detectResult);
        rectangleTracker.GetObjects(trackedRects, true);

        // Create/destroy the per-face noise filters as tracked faces appear and vanish.
        foreach (var openCVRect in trackedRects)
        {
            if (openCVRect.state == TrackedState.NEW)
            {
                if (!lowPassFilterDict.ContainsKey(openCVRect.id))
                {
                    lowPassFilterDict.Add(openCVRect.id, new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                }
                if (!opticalFlowFilterDict.ContainsKey(openCVRect.id))
                {
                    opticalFlowFilterDict.Add(openCVRect.id, new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                }
            }
            else if (openCVRect.state == TrackedState.DELETED)
            {
                if (lowPassFilterDict.ContainsKey(openCVRect.id))
                {
                    lowPassFilterDict[openCVRect.id].Dispose();
                    lowPassFilterDict.Remove(openCVRect.id);
                }
                if (opticalFlowFilterDict.ContainsKey(openCVRect.id))
                {
                    opticalFlowFilterDict[openCVRect.id].Dispose();
                    opticalFlowFilterDict.Remove(openCVRect.id);
                }
            }
        }

        // detect face landmark points.
        // Keep a list of the rects that produced each landmark set, parallel to
        // landmarkPoints. BUGFIX: the original indexed trackedRects with the landmark
        // index in the non-frontal filter below; the two lists misalign whenever a
        // tracked rect is in a non-displayed state, so the wrong rect was removed
        // (or RemoveAt threw).
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
        List<List<Vector2>> landmarkPoints = new List<List<Vector2>>();
        List<TrackedRect> landmarkRects = new List<TrackedRect>();
        foreach (var openCVRect in trackedRects)
        {
            if (openCVRect.state > TrackedState.NEW_DISPLAYED && openCVRect.state < TrackedState.NEW_HIDED)
            {
                UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
                List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                // apply noise filter (optical-flow first, then low-pass, in place).
                if (enableNoiseFilter)
                {
                    opticalFlowFilterDict[openCVRect.id].Process(rgbaMat, points, points);
                    lowPassFilterDict[openCVRect.id].Process(rgbaMat, points, points);
                }
                landmarkPoints.Add(points);
                landmarkRects.Add(openCVRect);
            }
        }

        // filter non frontal faces.
        if (filterNonFrontalFaces)
        {
            for (int i = 0; i < landmarkPoints.Count; i++)
            {
                if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
                {
                    // Remove the matching tracked rect by reference, not by index.
                    trackedRects.Remove(landmarkRects[i]);
                    landmarkRects.RemoveAt(i);
                    landmarkPoints.RemoveAt(i);
                    i--;
                }
            }
        }

        // face changing.
        if (faceMaskTexture != null && landmarkPoints.Count >= 1)
        {
            // Apply face changing between detected faces and a mask image.
            // Lazily locate the face inside the mask image (cached in detectedFaceRect).
            if (detectedFaceRect.width == 0.0f || detectedFaceRect.height == 0.0f)
            {
                DetectFaces(faceMaskMat, out detectResult, useDlibFaceDetecter);
                if (detectResult.Count >= 1)
                {
                    detectedFaceRect = new UnityEngine.Rect(detectResult[0].x, detectResult[0].y, detectResult[0].width, detectResult[0].height);
                }
            }

            if (detectedFaceRect.width > 0 || detectedFaceRect.height > 0)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, faceMaskMat);
                List<Vector2> souseLandmarkPoint = faceLandmarkDetector.DetectLandmark(detectedFaceRect);

                // Paste the mask face onto every surviving detected face.
                faceChanger.SetTargetImage(rgbaMat);
                for (int i = 0; i < landmarkPoints.Count; i++)
                {
                    faceChanger.AddFaceChangeData(faceMaskMat, souseLandmarkPoint, landmarkPoints[i], 1);
                }
                faceChanger.ChangeFace();

                if (displayFaceRects)
                {
                    OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, detectedFaceRect, new Scalar(255, 0, 0, 255), 2);
                }
            }
        }
        else if (landmarkPoints.Count >= 2)
        {
            // Apply face changing between detected faces: first face onto the rest.
            faceChanger.SetTargetImage(rgbaMat);
            for (int i = 1; i < landmarkPoints.Count; i++)
            {
                faceChanger.AddFaceChangeData(rgbaMat, landmarkPoints[0], landmarkPoints[i], 1);
            }
            faceChanger.ChangeFace();
        }

        // draw face rects.
        if (displayFaceRects)
        {
            for (int i = 0; i < trackedRects.Count; i++)
            {
                Rect openCVRect = trackedRects[i];
                UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
                OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);
            }
        }

        // display the face mask image, scaled to a quarter width, in the top-right corner.
        if (faceMaskMat != null)
        {
            float scale = (rgbaMat.width() / 4f) / faceMaskMat.width();
            float tx = rgbaMat.width() - faceMaskMat.width() * scale;
            float ty = 0.0f;
            // 2x3 affine matrix: uniform scale plus translation (tx, ty).
            Mat trans = new Mat(2, 3, CvType.CV_32F);
            trans.put(0, 0, scale);
            trans.put(0, 1, 0.0f);
            trans.put(0, 2, tx);
            trans.put(1, 0, 0.0f);
            trans.put(1, 1, scale);
            trans.put(1, 2, ty);
            Imgproc.warpAffine(faceMaskMat, rgbaMat, trans, rgbaMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));
            trans.Dispose(); // was leaked every frame in the original

            // Restore the mask Mat if debug drawing scribbled on it above.
            if (displayFaceRects || displayDebugFacePoints)
            {
                OpenCVForUnity.UnityUtils.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);
            }
        }

        OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbaMat, texture);
    }
}
/// <summary>
/// One-shot multi-face swap on a still image: fits the camera to the image,
/// detects all faces and their landmarks, shuffles the face order, swaps the
/// first shuffled face onto every other face, and pushes the result onto the
/// renderer's main texture.
/// </summary>
private void Run()
{
    // Reflect the serialized settings in the UI toggles.
    displayFaceRectsToggle.isOn = displayFaceRects;
    useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
    filterNonFrontalFacesToggle.isOn = filterNonFrontalFaces;
    displayDebugFacePointsToggle.isOn = displayDebugFacePoints;

    if (imgTexture == null)
    {
        imgTexture = Resources.Load("family") as Texture2D;
    }

    // Scale the quad to the image size and fit the orthographic camera to the screen.
    gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = gameObject.transform.localScale.x;
    float height = gameObject.transform.localScale.y;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    }
    else
    {
        Camera.main.orthographicSize = height / 2;
    }

    Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    OpenCVForUnity.UnityUtils.Utils.texture2DToMat(imgTexture, rgbaMat);
    Debug.Log("rgbaMat ToString " + rgbaMat.ToString());

    // Lazily create the landmark detector; scorer is sized to the image.
    if (faceLandmarkDetector == null)
    {
        faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
    }
    FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(width, height);

    // detect faces (dlib detector or OpenCV Haar cascade).
    List<Rect> detectResult = new List<Rect>();
    if (useDlibFaceDetecter)
    {
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
        List<UnityEngine.Rect> result = faceLandmarkDetector.Detect();
        foreach (var unityRect in result)
        {
            detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
        }
    }
    else
    {
        if (cascade == null)
        {
            cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
        }
        //if (cascade.empty ()) {
        //    Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
        //}

        // convert image to greyscale, equalize, then run the cascade.
        Mat gray = new Mat();
        Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);
        MatOfRect faces = new MatOfRect();
        Imgproc.equalizeHist(gray, gray);
        cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
        detectResult = faces.toList();

        // correct the deviation between OpenCV's and dlib's face rectangles.
        foreach (Rect r in detectResult)
        {
            r.y += (int)(r.height * 0.1f);
        }
        gray.Dispose();
    }

    // detect the 68 face landmark points inside each face rect.
    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
    List<List<Vector2>> landmarkPoints = new List<List<Vector2>>();
    foreach (var openCVRect in detectResult)
    {
        UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
        Debug.Log("face : " + rect);
        List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
        landmarkPoints.Add(points);
    }

    // filter non frontal faces (detectResult and landmarkPoints stay parallel here).
    if (filterNonFrontalFaces)
    {
        for (int i = 0; i < landmarkPoints.Count; i++)
        {
            if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
            {
                detectResult.RemoveAt(i);
                landmarkPoints.RemoveAt(i);
                i--;
            }
        }
    }

    // change faces: randomly permute the face indices, then copy the first face
    // in the permutation onto all the others.
    int[] face_nums = new int[landmarkPoints.Count];
    for (int i = 0; i < face_nums.Length; i++)
    {
        face_nums[i] = i;
    }
    face_nums = face_nums.OrderBy(i => System.Guid.NewGuid()).ToArray(); // cheap random shuffle

    if (landmarkPoints.Count >= 2)
    {
        DlibFaceChanger faceChanger = new DlibFaceChanger();
        faceChanger.isShowingDebugFacePoints = displayDebugFacePoints;
        faceChanger.SetTargetImage(rgbaMat);
        for (int i = 1; i < face_nums.Length; i++)
        {
            faceChanger.AddFaceChangeData(rgbaMat, landmarkPoints[face_nums[0]], landmarkPoints[face_nums[i]], 1);
        }
        faceChanger.ChangeFace();
        faceChanger.Dispose();
    }

    // draw face rects: source face in yellow, destination faces in red.
    if (displayFaceRects && face_nums.Length > 0) // Length, not LINQ Count(), on an array
    {
        int ann = face_nums[0];
        UnityEngine.Rect rect_ann = new UnityEngine.Rect(detectResult[ann].x, detectResult[ann].y, detectResult[ann].width, detectResult[ann].height);
        OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_ann, new Scalar(255, 255, 0, 255), 2);
        for (int i = 1; i < face_nums.Length; i++)
        {
            int bob = face_nums[i];
            UnityEngine.Rect rect_bob = new UnityEngine.Rect(detectResult[bob].x, detectResult[bob].y, detectResult[bob].width, detectResult[bob].height);
            OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_bob, new Scalar(255, 0, 0, 255), 2);
        }
    }
    frontalFaceChecker.Dispose();

    // Push the composited image onto the renderer.
    Texture2D texture = new Texture2D(rgbaMat.cols(), rgbaMat.rows(), TextureFormat.RGBA32, false);
    OpenCVForUnity.UnityUtils.Utils.matToTexture2D(rgbaMat, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
    rgbaMat.Dispose();
}
// Update is called once per frame.
// Per frame: detect faces, optionally track them, detect landmarks, swap either a
// mask image or the first face onto the others, then blit the result to 'texture'.
void Update()
{
    // Process only freshly arrived webcam frames.
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // detect faces (dlib detector or OpenCV Haar cascade, toggle-selected).
        List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect>();
        if (useDlibFaceDetecter)
        {
            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
            List<UnityEngine.Rect> result = faceLandmarkDetector.Detect();
            foreach (var unityRect in result)
            {
                detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
            }
        }
        else
        {
            // convert image to greyscale (into the class-level grayMat buffer).
            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect())
            {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);
                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());
                detectResult = faces.toList();

                // adjust to dlib's result: Haar boxes sit slightly higher, shift down.
                foreach (OpenCVForUnity.Rect r in detectResult)
                {
                    r.y += (int)(r.height * 0.1f);
                }
            }
        }

        // face tracking: replace raw detections with the tracker's smoothed rects.
        if (enableTracking)
        {
            rectangleTracker.UpdateTrackedObjects(detectResult);
            detectResult = new List<OpenCVForUnity.Rect>();
            rectangleTracker.GetObjects(detectResult, true);
        }

        // detect face landmark points; landmarkPoints stays parallel to detectResult.
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
        List<List<Vector2>> landmarkPoints = new List<List<Vector2>>();
        foreach (var openCVRect in detectResult)
        {
            UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
            List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
            landmarkPoints.Add(points);
        }

        // filter non frontal faces (removes from both parallel lists).
        if (filterNonFrontalFaces)
        {
            for (int i = 0; i < landmarkPoints.Count; i++)
            {
                if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                {
                    detectResult.RemoveAt(i);
                    landmarkPoints.RemoveAt(i);
                    i--;
                }
            }
        }

        // face changing: mask image onto detected faces when a mask is set.
        if (faceMaskTexture != null && landmarkPoints.Count >= 1)
        {
            // Refresh faceMaskMat from the texture (undoes last frame's debug drawing).
            OpenCVForUnity.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);

            // Lazily locate the face inside the mask image; cached in detectedFaceRect.
            if (detectedFaceRect.width == 0.0f || detectedFaceRect.height == 0.0f)
            {
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, faceMaskMat);
                    List<UnityEngine.Rect> result = faceLandmarkDetector.Detect();
                    if (result.Count >= 1)
                    {
                        detectedFaceRect = result [0];
                    }
                }
                else
                {
                    // NOTE(review): this local grayMat shadows the class-level grayMat
                    // used above — intentional here, since the mask image has its own size.
                    using (Mat grayMat = new Mat())
                    using (Mat equalizeHistMat = new Mat())
                    using (MatOfRect faces = new MatOfRect())
                    {
                        // convert image to greyscale.
                        Imgproc.cvtColor(faceMaskMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                        Imgproc.equalizeHist(grayMat, equalizeHistMat);
                        cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());
                        //detectResult = faces.toList ();
                        List<OpenCVForUnity.Rect> faceList = faces.toList();
                        if (faceList.Count >= 1)
                        {
                            detectedFaceRect = new UnityEngine.Rect(faceList [0].x, faceList [0].y, faceList [0].width, faceList [0].height);
                            // adjust to dlib's result.
                            detectedFaceRect.y += detectedFaceRect.height * 0.1f;
                        }
                    }
                }
            }

            if (detectedFaceRect.width > 0 || detectedFaceRect.height > 0)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, faceMaskMat);
                List<Vector2> souseLandmarkPoint = faceLandmarkDetector.DetectLandmark(detectedFaceRect);

                // Paste the mask face onto every detected face.
                faceChanger.SetTargetImage(rgbaMat);
                for (int i = 0; i < landmarkPoints.Count; i++)
                {
                    faceChanger.AddFaceChangeData(faceMaskMat, souseLandmarkPoint, landmarkPoints [i], 1);
                }
                faceChanger.ChangeFace();

                if (displayFaceRects)
                {
                    OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, detectedFaceRect, new Scalar(255, 0, 0, 255), 2);
                }
            }
        }
        else if (landmarkPoints.Count >= 2)
        {
            // No mask image: copy the first detected face onto all the others.
            faceChanger.SetTargetImage(rgbaMat);
            for (int i = 1; i < landmarkPoints.Count; i++)
            {
                faceChanger.AddFaceChangeData(rgbaMat, landmarkPoints [0], landmarkPoints [i], 1);
            }
            faceChanger.ChangeFace();
        }

        // draw face rects.
        if (displayFaceRects)
        {
            for (int i = 0; i < detectResult.Count; i++)
            {
                UnityEngine.Rect rect = new UnityEngine.Rect(detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);
            }
        }

        // display the face mask image, scaled to a quarter width, in the top-right corner.
        if (faceMaskMat != null)
        {
            float scale = (rgbaMat.width() / 4f) / faceMaskMat.width();
            float tx = rgbaMat.width() - faceMaskMat.width() * scale;
            float ty = 0.0f;
            // 2x3 affine matrix: uniform scale plus translation (tx, ty).
            // NOTE(review): 'trans' is never disposed — leaks a small Mat every frame.
            Mat trans = new Mat(2, 3, CvType.CV_32F);
            trans.put(0, 0, scale);
            trans.put(0, 1, 0.0f);
            trans.put(0, 2, tx);
            trans.put(1, 0, 0.0f);
            trans.put(1, 1, scale);
            trans.put(1, 2, ty);
            Imgproc.warpAffine(faceMaskMat, rgbaMat, trans, rgbaMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));
        }

        // On-screen diagnostics, then blit the frame to the output texture.
        Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

        OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, colors);
    }
}