/// <summary>
/// Raises the video capture to mat helper initialized event.
/// Builds the display texture and quad from the source frame size, fits the
/// quad into the screen, and allocates the grayscale buffer, Haar cascade and
/// frontal-face checker.
/// </summary>
public void OnVideoCaptureToMatHelperInitialized()
{
    Debug.Log("OnVideoCaptureToMatHelperInitialized");

    Mat rgbMat = sourceToMatHelper.GetMat();

    // The display texture matches the source frame; the quad is scaled to the
    // frame size in pixels.
    texture = new Texture2D(rgbMat.cols(), rgbMat.rows(), TextureFormat.RGB24, false);
    gameObject.transform.localScale = new Vector3(rgbMat.cols(), rgbMat.rows(), 1);

    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    if (fpsMonitor != null)
    {
        fpsMonitor.Add("width", rgbMat.width().ToString());
        fpsMonitor.Add("height", rgbMat.height().ToString());
        fpsMonitor.Add("orientation", Screen.orientation.ToString());
    }

    // Letterbox the quad: size the orthographic camera by whichever screen
    // axis constrains the quad first.
    float quadW = gameObject.transform.localScale.x;
    float quadH = gameObject.transform.localScale.y;
    bool widthIsTighter = ((float)Screen.width / quadW) < ((float)Screen.height / quadH);
    Camera.main.orthographicSize = widthIsTighter
        ? (quadW * (float)Screen.height / (float)Screen.width) / 2
        : quadH / 2;

    gameObject.GetComponent<Renderer>().material.mainTexture = texture;

    grayMat = new Mat(rgbMat.rows(), rgbMat.cols(), CvType.CV_8UC1);
    cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
    // NOTE(review): cascade.empty() is not checked here; a missing cascade
    // file will only surface later, at detection time.

    frontalFaceChecker = new FrontalFaceChecker(quadW, quadH);
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// Allocates the pixel buffer and display texture from the camera frame size,
/// fits the quad into the screen, and creates the grayscale buffer, Haar
/// cascade and frontal-face checker.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    colors = new Color32[webCamTextureMat.cols() * webCamTextureMat.rows()];
    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Letterbox the quad: size the orthographic camera by whichever screen
    // axis constrains the quad first.
    float quadW = gameObject.transform.localScale.x;
    float quadH = gameObject.transform.localScale.y;
    Camera.main.orthographicSize = ((float)Screen.width / quadW < (float)Screen.height / quadH)
        ? (quadW * (float)Screen.height / (float)Screen.width) / 2
        : quadH / 2;

    gameObject.GetComponent<Renderer>().material.mainTexture = texture;

    grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
    cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
    // NOTE(review): cascade.empty() is not checked here; a missing cascade
    // file will only surface later, at detection time.

    frontalFaceChecker = new FrontalFaceChecker(quadW, quadH);
}
/// <summary>
/// Runs the image face-swap demo: detects faces in <c>imgTexture</c> (dlib or
/// Haar cascade, per toggle), extracts 68-point landmarks, optionally filters
/// non-frontal faces, swaps faces pairwise in a random order, optionally draws
/// the face rectangles, and shows the result on this GameObject's renderer.
/// Fix: the native <c>MatOfRect faces</c> buffer is now disposed (it was
/// previously leaked; only <c>gray</c> was released).
/// </summary>
private void Run()
{
    // Sync UI toggles with the current option flags.
    displayFaceRectsToggle.isOn = displayFaceRects;
    useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
    filterNonFrontalFacesToggle.isOn = filterNonFrontalFaces;
    useSeamlessCloneToggle.isOn = useSeamlessClone;
    displayDebugFacePointsToggle.isOn = displayDebugFacePoints;

    if (imgTexture == null)
    {
        imgTexture = Resources.Load("family") as Texture2D;
    }

    gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Letterbox the quad into the screen.
    float width = gameObject.transform.localScale.x;
    float height = gameObject.transform.localScale.y;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    }
    else
    {
        Camera.main.orthographicSize = height / 2;
    }

    Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    OpenCVForUnity.UnityUtils.Utils.texture2DToMat(imgTexture, rgbaMat);
    Debug.Log("rgbaMat ToString " + rgbaMat.ToString());

    if (faceLandmarkDetector == null)
    {
        faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
    }

    FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(width, height);

    // --- detect faces ------------------------------------------------------
    List<Rect> detectResult = new List<Rect>();
    if (useDlibFaceDetecter)
    {
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
        List<UnityEngine.Rect> result = faceLandmarkDetector.Detect();
        foreach (var unityRect in result)
        {
            detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
        }
    }
    else
    {
        if (cascade == null)
        {
            cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
        }

        // Haar cascades operate on an equalized grayscale image.
        Mat gray = new Mat();
        Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);
        MatOfRect faces = new MatOfRect();
        Imgproc.equalizeHist(gray, gray);
        cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
        detectResult = faces.toList();

        // Correct the deviation between OpenCV and dlib face rectangles.
        foreach (Rect r in detectResult)
        {
            r.y += (int)(r.height * 0.1f);
        }

        // FIX: release native buffers — `faces` was previously leaked.
        faces.Dispose();
        gray.Dispose();
    }

    // --- detect face landmark points ---------------------------------------
    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
    List<List<Vector2>> landmarkPoints = new List<List<Vector2>>();
    foreach (var openCVRect in detectResult)
    {
        UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
        Debug.Log("face : " + rect);
        List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
        landmarkPoints.Add(points);
    }

    // --- filter non-frontal faces ------------------------------------------
    if (filterNonFrontalFaces)
    {
        for (int i = 0; i < landmarkPoints.Count; i++)
        {
            if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
            {
                detectResult.RemoveAt(i);
                landmarkPoints.RemoveAt(i);
                i--; // compensate for the removal so no entry is skipped
            }
        }
    }

    // --- swap faces pairwise in a random order -----------------------------
    int[] face_nums = new int[landmarkPoints.Count];
    for (int i = 0; i < face_nums.Length; i++)
    {
        face_nums[i] = i;
    }
    face_nums = face_nums.OrderBy(i => System.Guid.NewGuid()).ToArray();

    if (landmarkPoints.Count >= 2)
    {
        DlibFaceSwapper faceSwapper = new DlibFaceSwapper();
        faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessClone;
        faceSwapper.isShowingDebugFacePoints = displayDebugFacePoints;

        int ann = 0, bob = 0;
        for (int i = 0; i < face_nums.Length - 1; i += 2)
        {
            ann = face_nums[i];
            bob = face_nums[i + 1];
            faceSwapper.SwapFaces(rgbaMat, landmarkPoints[ann], landmarkPoints[bob], 1);
        }
        faceSwapper.Dispose();
    }

    // --- draw face rects ---------------------------------------------------
    if (displayFaceRects)
    {
        int ann = 0, bob = 0;
        for (int i = 0; i < face_nums.Length - 1; i += 2)
        {
            ann = face_nums[i];
            bob = face_nums[i + 1];
            UnityEngine.Rect rect_ann = new UnityEngine.Rect(detectResult[ann].x, detectResult[ann].y, detectResult[ann].width, detectResult[ann].height);
            UnityEngine.Rect rect_bob = new UnityEngine.Rect(detectResult[bob].x, detectResult[bob].y, detectResult[bob].width, detectResult[bob].height);
            // One random color per swapped pair.
            Scalar color = new Scalar(UnityEngine.Random.Range(0, 256), UnityEngine.Random.Range(0, 256), UnityEngine.Random.Range(0, 256), 255);
            OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_ann, color, 2);
            OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_bob, color, 2);
        }
    }

    frontalFaceChecker.Dispose();

    Texture2D texture = new Texture2D(rgbaMat.cols(), rgbaMat.rows(), TextureFormat.RGBA32, false);
    OpenCVForUnity.UnityUtils.Utils.matToTexture2D(rgbaMat, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;

    rgbaMat.Dispose();
}
/// <summary>
/// One-shot face change: detects one face in <c>imgTexture</c>, pairs its 68
/// dlib landmarks with a hand-authored set of 68 UV-space landmarks, runs
/// DlibFaceChanger to copy face 0 onto face 1, then shows the full result as a
/// sprite and the right half of the image as the mesh's second material texture.
/// </summary>
void Run()
{
    Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    OpenCVForUnity.Utils.texture2DToMat(imgTexture, rgbaMat);

    // Frontal-face check covers the whole source texture.
    FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(imgTexture.width, imgTexture.height);

    // 1. Face detection: two rect detectors, switched by a toggle (dlib vs Haar cascade).
    List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect>();
    if (useDlibFaceDetecter)
    {
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
        List<UnityEngine.Rect> result = faceLandmarkDetector.Detect();
        foreach (UnityEngine.Rect unityRect in result)
        {
            detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
        }
    }
    else
    {
        // NOTE(review): a new CascadeClassifier is created on every call and
        // any previous one is never disposed — potential native-memory leak.
        cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);

        Mat gray = new Mat();
        Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);
        MatOfRect faces = new MatOfRect();
        Imgproc.equalizeHist(gray, gray);
        cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
        detectResult = faces.toList();

        // adjust to Dilb's result.
        foreach (OpenCVForUnity.Rect r in detectResult)
        {
            r.y += (int)(r.height * 0.1f);
        }
        // NOTE(review): only `gray` is released; `faces` (MatOfRect) is never disposed.
        gray.Dispose();
    }

    // Sort rects left-to-right (ascending x) so index 0 is the leftmost face;
    // the "copy 0 onto 1" step below relies on this order.
    detectResult.Sort(delegate(OpenCVForUnity.Rect x, OpenCVForUnity.Rect y)
    {
        return (x.x.CompareTo(y.x)); // ascending; swap operands for descending
    });

    // 2. Landmark localization from the rect found in step 1.
    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
    List<List<Vector2>> landmarkPoints = new List<List<Vector2>>();

    // TODO: only one face is detected from the photo; the second "face" is a
    // hand-entered 68-point set for a UV texture (see `uvs` below).
    // NOTE(review): detectResult[0] throws if no face was detected — unchecked.
    OpenCVForUnity.Rect openCVRect = detectResult[0];
    UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
    List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
    landmarkPoints.Add(points);

    // Build a code snippet of the detected points shifted +500 in x — a debug
    // aid for authoring the `uvs` table below (the Debug.Log is disabled).
    string log = "";
    for (int i = 0; i < points.Count; i++)
    {
        log += "new Vector2(" + ((int)points[i].x + 500) + "," + (int)points[i].y + "),\n";
    }
    //Debug.Log(log);

    // Hand-written landmark set for one UV face image, in the dlib 68-point
    // layout: jaw [0-16], brows [17-26], nose [27-35], eyes [36-47],
    // outer mouth [48-59], inner mouth [60-67].
    List<Vector2> uvs = new List<Vector2>()
    {
        //0-7 jaw (left side)
        new Vector2(583, 210),  //0
        new Vector2(584, 250),  //1
        new Vector2(585, 305),  //2
        new Vector2(600, 360),  //3
        new Vector2(630, 410),  //4
        new Vector2(670, 425),  //5
        new Vector2(710, 440),  //6
        new Vector2(740, 445),  //7
        //8 chin (center axis)
        new Vector2(770, 450),  //8
        //9-16 jaw (right side)
        new Vector2(800, 445),  //9
        new Vector2(830, 440),  //10
        new Vector2(870, 425),  //11
        new Vector2(910, 410),  //12
        new Vector2(940, 360),  //13
        new Vector2(955, 305),  //14
        new Vector2(956, 250),  //15
        new Vector2(957, 210),  //16
        //17-21 leftBrow
        new Vector2(655, 165),  //17
        new Vector2(680, 155),  //18
        new Vector2(710, 160),  //19
        new Vector2(730, 170),  //20
        new Vector2(750, 190),  //21
        //22-26 rightBrow
        new Vector2(790, 190),  //22
        new Vector2(810, 170),  //23
        new Vector2(830, 160),  //24
        new Vector2(860, 155),  //25
        new Vector2(885, 165),  //26
        //27-30 nose bridge (vertical)
        new Vector2(770, 220),  //27
        new Vector2(770, 250),  //28
        new Vector2(770, 275),  //29
        new Vector2(770, 300),  //30
        //31-35 nose base (horizontal)
        new Vector2(740, 312),  //31
        new Vector2(755, 316),  //32
        new Vector2(770, 320),  //33
        new Vector2(785, 316),  //34
        new Vector2(800, 312),  //35
        //36-41 leftEye
        new Vector2(670, 215),  //36
        new Vector2(690, 200),  //37
        new Vector2(715, 205),  //38
        new Vector2(730, 225),  //39
        new Vector2(710, 230),  //40
        new Vector2(690, 227),  //41
        //42-47 rightEye
        new Vector2(810, 225),  //42
        new Vector2(825, 205),  //43
        new Vector2(850, 200),  //44
        new Vector2(870, 215),  //45
        new Vector2(855, 227),  //46
        new Vector2(830, 230),  //47
        //48-59 outer mouth
        new Vector2(720, 360),  //48 left corner
        new Vector2(735, 355),  //49
        new Vector2(750, 350),  //50
        new Vector2(770, 352),  //51 top
        new Vector2(790, 350),  //52
        new Vector2(805, 355),  //53
        new Vector2(820, 360),  //54 right corner
        new Vector2(805, 375),  //55
        new Vector2(790, 382),  //56
        new Vector2(770, 380),  //57 bottom
        new Vector2(750, 382),  //58
        new Vector2(735, 375),  //59
        //60-67 inner mouth
        new Vector2(730, 365),  //60 left
        new Vector2(750, 357),  //61
        new Vector2(770, 354),  //62 top
        new Vector2(790, 357),  //63
        new Vector2(810, 365),  //64 right
        new Vector2(790, 370),  //65
        new Vector2(770, 375),  //66 bottom
        new Vector2(750, 370),  //67
    };
    landmarkPoints.Add(uvs);

    // Debug-only loop: was used to draw/label the hand-written UV landmarks
    // (all drawing calls are disabled), so it is currently a no-op.
    for (int i = 0; i < landmarkPoints[1].Count; i++)
    {
    }

    // Filter out non-frontal faces (rate threshold in 0..1).
    // NOTE(review): the loop index over landmarkPoints (which includes the
    // appended UV face) is reused against detectResult (which does not) —
    // RemoveAt can throw or remove the wrong rect when the UV entry fails
    // the frontal check.
    if (filterNonFrontalFaces)
    {
        for (int i = 0; i < landmarkPoints.Count; i++)
        {
            if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
            {
                detectResult.RemoveAt(i);
                landmarkPoints.RemoveAt(i);
                i--;
            }
        }
    }

    // With a single face only the landmarks are extracted; exactly two faces
    // trigger the change.
    if (landmarkPoints.Count == 2)
    {
        DlibFaceChanger faceChanger = new DlibFaceChanger();
        faceChanger.isShowingDebugFacePoints = displayDebugFacePoints;
        faceChanger.SetTargetImage(rgbaMat);                                                // target Mat
        faceChanger.AddFaceChangeData(rgbaMat, landmarkPoints[0], landmarkPoints[1], 0.9f); // source Mat: copy face 0 onto face 1
        faceChanger.ChangeFace();
        faceChanger.Dispose();
    }

    frontalFaceChecker.Dispose();

    // Background image: full result shown as a sprite.
    Texture2D t2d = new Texture2D(rgbaMat.width(), rgbaMat.height(), TextureFormat.RGBA32, false);
    OpenCVForUnity.Utils.matToTexture2D(rgbaMat, t2d);
    Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    srcImage.sprite = sp;
    srcImage.preserveAspect = true;

    // Right half of the result becomes the mesh's second material texture.
    // NOTE(review): dstMat (a view into rgbaMat) is never disposed.
    Mat dstMat = new Mat(rgbaMat, new OpenCVForUnity.Rect(rgbaMat.width() / 2, 0, rgbaMat.width() / 2, rgbaMat.height()));
    Texture2D dst_t2d = new Texture2D(dstMat.width(), dstMat.height(), TextureFormat.RGBA32, false);
    OpenCVForUnity.Utils.matToTexture2D(dstMat, dst_t2d);
    mesh.materials[1].mainTexture = dst_t2d;

    rgbaMat.Dispose();
}
/// <summary>
/// Initializes the rectangle tracker, frontal-face checker, landmark detector
/// and face swapper, opens the sample video, sizes the display quad to the
/// video frame, and syncs the UI toggles with the current option flags.
/// </summary>
private void Run()
{
    rectangleTracker = new RectangleTracker();
    frontalFaceChecker = new FrontalFaceChecker((float)frameWidth, (float)frameHeight);
    faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);

    faceSwapper = new DlibFaceSwapper();
    faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessClone;
    faceSwapper.isShowingDebugFacePoints = displayDebugFacePoints;

    rgbMat = new Mat();

    capture.open(couple_avi_filepath);
    Debug.Log(capture.isOpened() ? "capture.isOpened() true" : "capture.isOpened() false");

    // Dump the capture properties for diagnostics.
    Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
    Debug.Log("CV_CAP_PROP_PREVIEW_FORMAT: " + capture.get(Videoio.CV_CAP_PROP_PREVIEW_FORMAT));
    Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
    Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
    Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
    Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
    Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
    Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
    Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));

    texture = new Texture2D((int)(frameWidth), (int)(frameHeight), TextureFormat.RGBA32, false);
    gameObject.transform.localScale = new Vector3((float)frameWidth, (float)frameHeight, 1);

    // Letterbox the quad: size the orthographic camera by whichever screen
    // axis constrains the frame first.
    float hFit = (float)Screen.width / (float)frameWidth;
    float vFit = (float)Screen.height / (float)frameHeight;
    Camera.main.orthographicSize = (hFit < vFit)
        ? ((float)frameWidth * (float)Screen.height / (float)Screen.width) / 2
        : (float)frameHeight / 2;

    gameObject.GetComponent<Renderer>().material.mainTexture = texture;

    grayMat = new Mat((int)frameHeight, (int)frameWidth, CvType.CV_8UC1);
    cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
    // NOTE(review): cascade.empty() is not checked here; a missing cascade
    // file will only surface later, at detection time.

    // Sync UI toggles with the current option flags.
    displayFaceRectsToggle.isOn = displayFaceRects;
    useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
    filterNonFrontalFacesToggle.isOn = filterNonFrontalFaces;
    useSeamlessCloneToggle.isOn = useSeamlessClone;
    enableTrackingToggle.isOn = enableTracking;
    displayDebugFacePointsToggle.isOn = displayDebugFacePoints;
}