// Update is called once per frame
void Update()
{
    if (Input.GetKeyDown(KeyCode.Escape))
    {
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
        SceneManager.LoadScene("FacemojiStart");
#else
        Application.LoadLevel("FacemojiStart");
#endif
    }

    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

        List<UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

        foreach (var rect in detectResult)
        {
            List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
            if (points.Count > 0)
            {
                live2DModelUpdate(points);
                //currentFacePoints = points;
                break;
            }
        }
    }
}
// Update is called once per frame
void Update()
{
    if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

        //detect face rects
        List<UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

        foreach (var rect in detectResult)
        {
            //detect landmark points
            List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
            if (points.Count > 0)
            {
                //draw landmark points
                OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
            }

            //draw face rect
            OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);
        }

        Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation,
            new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

        OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, colors);
    }
}
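For context, here is a minimal sketch (not from the original source) of how the texture and colors buffers consumed by matToTexture2D above are typically allocated; the callback name follows the common OpenCVForUnity helper convention and is an assumption here.

// Hypothetical initialization callback (assumed, not from the original source):
// allocate the Texture2D and Color32 buffer once the webcam helper has produced a Mat.
public void OnWebCamTextureToMatHelperInitialized()
{
    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    // RGBA32 matches the 4-channel rgbaMat the helper produces.
    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    colors = new Color32[webCamTextureMat.cols() * webCamTextureMat.rows()];

    // Show the camera image on this GameObject.
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}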
// Update is called once per frame
void Update()
{
    if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // Resize the image for face detection
        Imgproc.resize(rgbaMat, rgbaMat_downscale, new Size(), 1.0 / FACE_DOWNSAMPLE_RATIO, 1.0 / FACE_DOWNSAMPLE_RATIO, Imgproc.INTER_LINEAR);

        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat_downscale);

        // Detect faces on the resized image, only every SKIP_FRAMES-th frame
        if (count % SKIP_FRAMES == 0)
        {
            //detect face rects
            detectResult = faceLandmarkDetector.Detect();
        }

        foreach (var rect in detectResult)
        {
            //detect landmark points
            List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
            if (points.Count > 0)
            {
                // scale the landmark points back up to the original image size
                List<Vector2> originalPoints = new List<Vector2>(points.Count);
                foreach (var point in points)
                {
                    originalPoints.Add(new Vector2(point.x * FACE_DOWNSAMPLE_RATIO, point.y * FACE_DOWNSAMPLE_RATIO));
                }

                //draw landmark points
                OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, originalPoints, new Scalar(0, 255, 0, 255), 2);
            }

            // scale the face rect back up to the original image size
            UnityEngine.Rect originalRect = new UnityEngine.Rect(rect.x * FACE_DOWNSAMPLE_RATIO, rect.y * FACE_DOWNSAMPLE_RATIO, rect.width * FACE_DOWNSAMPLE_RATIO, rect.height * FACE_DOWNSAMPLE_RATIO);

            //draw face rect
            OpenCVForUnityUtils.DrawFaceRect(rgbaMat, originalRect, new Scalar(255, 0, 0, 255), 2);
        }

        Imgproc.putText(rgbaMat, "Original: (" + rgbaMat.width() + "," + rgbaMat.height() + ") DownScale: (" + rgbaMat_downscale.width() + "," + rgbaMat_downscale.height() + ") SkipFrames: " + SKIP_FRAMES,
            new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, colors);

        count++;
    }
}
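The downscale-and-skip variant above relies on a few fields that are not shown. A sketch of plausible declarations follows; the constant values are illustrative assumptions, not taken from the original source.

// Illustrative declarations assumed by the snippet above; the constants are example values.
const int FACE_DOWNSAMPLE_RATIO = 2;   // run face detection on a half-size image
const int SKIP_FRAMES = 3;             // run the face detector only every 3rd frame
int count = 0;                         // frame counter
Mat rgbaMat_downscale = new Mat();     // reusable buffer for the resized frame
List<UnityEngine.Rect> detectResult = new List<UnityEngine.Rect>();  // last detection result, reused on skipped frames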
// Update is called once per frame
void Update()
{
    if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

        List<UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

        foreach (var rect in detectResult)
        {
            OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);

            List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
            if (points.Count > 0)
            {
                live2DModelUpdate(points);
                currentFacePoints = points;
                break;
            }
        }

        if (isHideCameraImage)
        {
            Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(0, 0, 0, 255), -1);
        }

        if (currentFacePoints != null)
        {
            OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, currentFacePoints, new Scalar(0, 255, 0, 255), 2);
        }

        Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation,
            new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

        OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, colors);
    }
}
void FaceLM(Mat rgbaMat)
{
    Mat downScaleRgbaMat = null;
    float DOWNSCALE_RATIO = 1.0f;
    if (enableDownScale)
    {
        downScaleRgbaMat = imageOptimizationHelper.GetDownScaleMat(rgbaMat);
        DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
    }
    else
    {
        downScaleRgbaMat = rgbaMat;
        DOWNSCALE_RATIO = 1.0f;
    }

    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, downScaleRgbaMat);
    // Imgproc.cvtColor(downScaleRgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

    List<UnityEngine.Rect> detectionResult = faceLandmarkDetector.Detect();

    if (detectionResult.Count > 0)
    {
        if (enableDownScale)
        {
            for (int i = 0; i < detectionResult.Count; ++i)
            {
                var rect = detectionResult[i];
                detectionResult[i] = new UnityEngine.Rect(
                    rect.x * DOWNSCALE_RATIO,
                    rect.y * DOWNSCALE_RATIO,
                    rect.width * DOWNSCALE_RATIO,
                    rect.height * DOWNSCALE_RATIO);
            }
        }

        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

        //detect landmark points
        facePoints = faceLandmarkDetector.DetectLandmark(detectionResult[0]);

        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, facePoints, new Scalar(0, 255, 0, 255), 1, false, true);
    }
}
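A minimal usage sketch, assuming FaceLM is driven from Update once the webcam helper has a new frame; this wiring is an assumption and is not shown in the original.

// Assumed caller: feed one frame per Update into FaceLM, then display the annotated Mat.
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // Detect the face and draw the landmarks into rgbaMat.
        FaceLM(rgbaMat);

        // Upload the result to the display texture.
        OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture);
    }
}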
// Update is called once per frame
void Update()
{
    //Loop play
    if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
    {
        capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
    }

    // Note: the "PlayerLoop called recursively!" error can occur on iOS; WebCamTexture is recommended there.
    if (capture.grab())
    {
        capture.retrieve(rgbMat, 0);

        Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
        //Debug.Log ("Mat toString " + rgbMat.ToString ());

        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

        //detect face rects
        List<UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

        foreach (var rect in detectResult)
        {
            //detect landmark points
            List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
            if (points.Count > 0)
            {
                //draw landmark points
                OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
            }

            //draw face rect
            OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0), 2);
        }

        Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation,
            new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

        OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture);
    }
}
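For completeness, an illustrative initialization of the capture and rgbMat fields used above; the clip name and exact setup are assumptions, not from the original source.

// Hypothetical setup: open a video file and allocate the RGB frame buffer.
void Start()
{
    capture = new VideoCapture();
    capture.open(OpenCVForUnity.Utils.getFilePath("sample_video.mp4"));  // hypothetical file in StreamingAssets

    if (!capture.isOpened())
        Debug.LogError("capture could not be opened.");

    rgbMat = new Mat((int)capture.get(Videoio.CAP_PROP_FRAME_HEIGHT), (int)capture.get(Videoio.CAP_PROP_FRAME_WIDTH), CvType.CV_8UC3);
}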
// Use this for initialization
void Run(Mat imgMat)
{
    Utils.setDebugMode(true);

    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy it from the \"DlibFaceLandmarkDetector/StreamingAssets/\" folder to the \"Assets/StreamingAssets/\" folder.");
    }

    FaceLandmarkDetector faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, imgMat);

    //detect face rect detections
    List<FaceLandmarkDetector.RectDetection> detectResult = faceLandmarkDetector.DetectRectDetection();

    foreach (var result in detectResult)
    {
        Debug.Log("rect : " + result.rect);
        Debug.Log("detection_confidence : " + result.detection_confidence);
        Debug.Log("weight_index : " + result.weight_index);

        //detect landmark points
        List<Vector2> points = faceLandmarkDetector.DetectLandmark(result.rect);
        Debug.Log("face points count : " + points.Count);

        //draw landmark points
        OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2, true);

        //draw face rect
        OpenCVForUnityUtils.DrawFaceRect(imgMat, result, new Scalar(255, 0, 0, 255), 2);
    }

    faceLandmarkDetector.Dispose();

    Utils.setDebugMode(false);
}
// Update is called once per frame
void Update()
{
    //Loop play
    if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
    {
        capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
    }

    if (capture.grab())
    {
        capture.retrieve(rgbMat, 0);

        Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
        //Debug.Log ("Mat toString " + rgbMat.ToString ());

        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

        //detect face rects
        List<UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

        if (detectResult.Count > 0)
        {
            //detect landmark points
            List<Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult[0]);

            if (points.Count > 0)
            {
                if (shouldDrawFacePoints)
                {
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
                }

                imagePoints.fromArray(
                    new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye
                    new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye
                    new Point(points[33].x, points[33].y), //nose
                    new Point(points[48].x, points[48].y), //l mouth
                    new Point(points[54].x, points[54].y), //r mouth
                    new Point(points[0].x, points[0].y), //l ear
                    new Point(points[16].x, points[16].y) //r ear
                );

                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                if (tvec.get(2, 0)[0] > 0)
                {
                    if (Mathf.Abs((float)(points[43].y - points[46].y)) > Mathf.Abs((float)(points[42].x - points[45].x)) / 6.0)
                    {
                        if (shouldDrawEffects)
                        {
                            rightEye.SetActive(true);
                        }
                    }

                    if (Mathf.Abs((float)(points[38].y - points[41].y)) > Mathf.Abs((float)(points[39].x - points[36].x)) / 6.0)
                    {
                        if (shouldDrawEffects)
                        {
                            leftEye.SetActive(true);
                        }
                    }

                    if (shouldDrawHead)
                    {
                        head.SetActive(true);
                    }
                    if (shouldDrawAxes)
                    {
                        axes.SetActive(true);
                    }

                    float noseDistance = Mathf.Abs((float)(points[27].y - points[33].y));
                    float mouseDistance = Mathf.Abs((float)(points[62].y - points[66].y));
                    if (mouseDistance > noseDistance / 5.0)
                    {
                        if (shouldDrawEffects)
                        {
                            mouth.SetActive(true);
                            foreach (ParticleSystem ps in mouthParticleSystem)
                            {
                                ps.enableEmission = true;
                                ps.startSize = 40 * (mouseDistance / noseDistance);
                            }
                        }
                    }
                    else
                    {
                        if (shouldDrawEffects)
                        {
                            foreach (ParticleSystem ps in mouthParticleSystem)
                            {
                                ps.enableEmission = false;
                            }
                        }
                    }

                    Calib3d.Rodrigues(rvec, rotM);

                    transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0)[0], (float)rotM.get(0, 1)[0], (float)rotM.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                    transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0)[0], (float)rotM.get(1, 1)[0], (float)rotM.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                    transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0)[0], (float)rotM.get(2, 1)[0], (float)rotM.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                    if (shouldMoveARCamera)
                    {
                        if (ARGameObject != null)
                        {
                            ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
                            ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                            ARGameObject.SetActive(true);
                        }
                    }
                    else
                    {
                        ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
                        if (ARGameObject != null)
                        {
                            ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                            ARGameObject.SetActive(true);
                        }
                    }
                }
            }
        }
        else
        {
            rightEye.SetActive(false);
            leftEye.SetActive(false);
            head.SetActive(false);
            mouth.SetActive(false);
            axes.SetActive(false);
        }

        Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation,
            new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

        OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture, colors);
    }
}
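solvePnP above needs a camera matrix and distortion coefficients. Below is a rough sketch that assumes an uncalibrated webcam, approximating the focal length with the image width and placing the principal point at the image center; the helper name and the approximation are assumptions, not the original setup code.

// Hypothetical helper: build an approximate pinhole camera matrix for solvePnP.
void SetupCameraMatrix(int width, int height)
{
    double fx = width;            // rough focal length in pixels
    double fy = width;
    double cx = width / 2.0d;     // principal point at the image center
    double cy = height / 2.0d;

    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0,
        fx, 0, cx,
        0, fy, cy,
        0, 0, 1);

    // Assume no lens distortion.
    distCoeffs = new MatOfDouble(0, 0, 0, 0);
}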
///// <summary>
///// Modify `data` by applying a posterization transformation to it.
///// </summary>
///// <param name="data">The byte array to modify.</param>
///// <param name="levels">The threshold levels to split each byte into.</param>
//public static void ProcessImage(byte[] data, byte levels)
//{
//    byte factor = (byte)(byte.MaxValue / levels);
//    for (int i = 0; i < data.Length; i++)
//    {
//        data[i] = (byte)(data[i] / factor * factor);
//    }
//}

private void ProcessMat(Mat frameMat)
{
    if (grayMat == null)
    {
        grayMat = new Mat(frameMat.rows(), frameMat.cols(), CvType.CV_8UC1);
    }

    // detect faces on the downscale image
    if (!enableSkipFrame || !imageOptimizationHelper.IsCurrentFrameSkipped())
    {
        Mat downScaleRgbaMat = null;
        float DOWNSCALE_RATIO = 1.0f;
        if (enableDownScale)
        {
            downScaleRgbaMat = imageOptimizationHelper.GetDownScaleMat(frameMat);
            DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
        }
        else
        {
            downScaleRgbaMat = frameMat;
            DOWNSCALE_RATIO = 1.0f;
        }

        // set the downscale mat
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, downScaleRgbaMat);

        //detect face rects
        if (useOpenCVFaceDetector)
        {
            // convert image to greyscale.
            Imgproc.cvtColor(downScaleRgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

            using (Mat equalizeHistMat = new Mat())
            using (MatOfRect faces = new MatOfRect())
            {
                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE,
                    new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                List<OpenCVForUnity.CoreModule.Rect> opencvDetectResult = faces.toList();

                // correct the deviation between the face rectangles returned by OpenCV and Dlib.
                detectionResult.Clear();
                foreach (var opencvRect in opencvDetectResult)
                {
                    detectionResult.Add(new UnityEngine.Rect((float)opencvRect.x, (float)opencvRect.y + (float)(opencvRect.height * 0.1f), (float)opencvRect.width, (float)opencvRect.height));
                }
            }
        }
        else
        {
            // Dlib's face detection processing time increases in proportion to image size.
            detectionResult = faceLandmarkDetector.Detect();
        }

        if (enableDownScale)
        {
            for (int i = 0; i < detectionResult.Count; ++i)
            {
                var rect = detectionResult[i];
                detectionResult[i] = new UnityEngine.Rect(
                    rect.x * DOWNSCALE_RATIO,
                    rect.y * DOWNSCALE_RATIO,
                    rect.width * DOWNSCALE_RATIO,
                    rect.height * DOWNSCALE_RATIO);
            }
        }
    }

    // set the original scale image
    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, frameMat);

    // detect face landmarks on the original image
    foreach (var rect in detectionResult)
    {
        //detect landmark points
        List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

        //draw landmark points
        OpenCVForUnityUtils.DrawFaceLandmark(frameMat, points, new Scalar(0, 255, 0, 255), 2);

        //draw face rect
        OpenCVForUnityUtils.DrawFaceRect(frameMat, rect, new Scalar(255, 0, 0, 255), 2);
    }
}
/// <summary>
/// Update is called once per frame
/// </summary>
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

        //detect face rects
        List<UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

        if (detectResult.Count > 0)
        {
            //detect landmark points
            List<Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult[0]);

            //draw the landmark points on the mat
            OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);

            imagePoints.fromArray(
                new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                new Point(points[30].x, points[30].y), //nose (Nose top)
                new Point(points[48].x, points[48].y), //l mouth (Mouth breadth)
                new Point(points[54].x, points[54].y), //r mouth (Mouth breadth)
                new Point(points[0].x, points[0].y), //l ear (Bitragion breadth)
                new Point(points[16].x, points[16].y) //r ear (Bitragion breadth)
            );

            // Estimate head pose.
            if (rvec == null || tvec == null)
            {
                rvec = new Mat(3, 1, CvType.CV_64FC1);
                tvec = new Mat(3, 1, CvType.CV_64FC1);
                // solvePnP estimates the object pose from 3D-2D point correspondences:
                // rvec is the rotation vector, tvec the translation vector.
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }

            double tvec_z = tvec.get(2, 0)[0];

            if (double.IsNaN(tvec_z) || tvec_z < 0)
            {
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }
            else
            {
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
            }

            if (!double.IsNaN(tvec_z) && points.Count == 68)
            {
                cubismParameterDictionary.Clear();
                Calib3d.Rodrigues(rvec, rotMat);

                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                ARM = invertYM * transformationM * invertZM;

                Vector3 forward;
                forward.x = ARM.m02;
                forward.y = ARM.m12;
                forward.z = ARM.m22;

                Vector3 upwards;
                upwards.x = ARM.m01;
                upwards.y = ARM.m11;
                upwards.z = ARM.m21;

                Vector3 angles = Quaternion.LookRotation(forward, upwards).eulerAngles;
                float rotateX = angles.x > 180 ? angles.x - 360 : angles.x;
                cubismParameterDictionary.Add(ParamAngleY, (float)Math.Round(rotateX));
                float rotateY = angles.y > 180 ? angles.y - 360 : angles.y;
                cubismParameterDictionary.Add(ParamAngleX, (float)Math.Round(-rotateY) * 2);
                float rotateZ = angles.z > 180 ? angles.z - 360 : angles.z;
                cubismParameterDictionary.Add(ParamAngleZ, (float)Math.Round(-rotateZ) * 2);
                //Debug.Log("X" + rotateX + "Y" + rotateY + "Z" + rotateZ);

                //ParamAngleX.BlendToValue(BlendMode, (float)(Math.Round(-rotateY) * 2));
                //ParamAngleY.BlendToValue(BlendMode, (float)Math.Round(rotateX));
                //ParamAngleZ.BlendToValue(BlendMode, (float)Math.Round(-rotateZ) * 2);

                float eyeOpen_L = Mathf.Clamp(Mathf.Abs(points[43].y - points[47].y) / (Mathf.Abs(points[43].x - points[44].x) * 0.75f), -0.1f, 2.0f);
                if (eyeOpen_L >= 0.8f)
                {
                    eyeOpen_L = 1f;
                }
                else
                {
                    eyeOpen_L = 0;
                }

                float eyeOpen_R = Mathf.Clamp(Mathf.Abs(points[38].y - points[40].y) / (Mathf.Abs(points[37].x - points[38].x) * 0.75f), -0.1f, 2.0f);
                if (eyeOpen_R >= 0.8f)
                {
                    eyeOpen_R = 1f;
                }
                else
                {
                    eyeOpen_R = 0;
                }

                // ParamEyeROpen.BlendToValue(BlendMode, eyeOpen_R);
                cubismParameterDictionary.Add(ParamEyeROpen, eyeOpen_R);
                // ParamEyeLOpen.BlendToValue(BlendMode, eyeOpen_L);
                cubismParameterDictionary.Add(ParamEyeLOpen, eyeOpen_L);
                // ParamEyeBallX.BlendToValue(BlendMode, (float)rotateY / 30f);
                cubismParameterDictionary.Add(ParamEyeBallX, rotateY / 30f);
                // ParamEyeBallX.BlendToValue(BlendMode, (float)-rotateX / 30f - 0.25f);
                cubismParameterDictionary.Add(ParamEyeBallY, (float)-rotateX / 30f - 0.25f);

                float RY = Mathf.Abs(points[19].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                RY -= 1;
                RY *= 4f;
                float LY = Mathf.Abs(points[24].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                LY -= 1;
                LY *= 4f;

                // ParamBrowRY.BlendToValue(BlendMode, RY);
                cubismParameterDictionary.Add(ParamBrowRY, RY);
                // ParamBrowLY.BlendToValue(BlendMode, LY);
                cubismParameterDictionary.Add(ParamBrowLY, LY);

                float mouthOpen = Mathf.Clamp01(Mathf.Abs(points[62].y - points[66].y) / (Mathf.Abs(points[51].y - points[62].y) + Mathf.Abs(points[66].y - points[57].y)));
                if (mouthOpen < 0.6f)
                {
                    mouthOpen = 0;
                }
                // ParamMouthOpenY.BlendToValue(BlendMode, mouthOpen);
                cubismParameterDictionary.Add(ParamMouthOpenY, mouthOpen);

                float mouthSize = Mathf.Abs(points[48].x - points[54].x) / (Mathf.Abs(points[31].x - points[35].x));
                // ParamMouthForm.BlendToValue(BlendMode, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
                cubismParameterDictionary.Add(ParamMouthForm, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
            }
        }

        OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
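The values collected in cubismParameterDictionary still have to be written to the Live2D model each frame. A sketch of one way to apply them, based on the BlendToValue calls commented out above; applying them in LateUpdate, after the Cubism model update, is an assumption.

// Assumed application step: push this frame's target values onto the Cubism parameters.
void LateUpdate()
{
    foreach (var entry in cubismParameterDictionary)
    {
        // entry.Key is a CubismParameter, entry.Value its target value for this frame.
        entry.Key.BlendToValue(BlendMode, entry.Value);
    }
}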
// Use this for initialization
void Start()
{
    gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = gameObject.transform.localScale.x;
    float height = gameObject.transform.localScale.y;

    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    }
    else
    {
        Camera.main.orthographicSize = height / 2;
    }

    Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

    OpenCVForUnity.Utils.texture2DToMat(imgTexture, imgMat);
    Debug.Log("imgMat dst ToString " + imgMat.ToString());

    FaceLandmarkDetector faceLandmarkDetector = new FaceLandmarkDetector(DlibFaceLandmarkDetector.Utils.getFilePath("shape_predictor_68_face_landmarks.dat"));

    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, imgMat);

    //detect face rect detections
    List<FaceLandmarkDetector.RectDetection> detectResult = faceLandmarkDetector.DetectRectDetection();

    foreach (var result in detectResult)
    {
        Debug.Log("rect : " + result.rect);
        Debug.Log("detection_confidence : " + result.detection_confidence);
        Debug.Log("weight_index : " + result.weight_index);
        // Debug.Log ("face : " + rect);

        Imgproc.putText(imgMat, "" + result.detection_confidence, new Point(result.rect.xMin, result.rect.yMin - 20), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
        Imgproc.putText(imgMat, "" + result.weight_index, new Point(result.rect.xMin, result.rect.yMin - 5), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

        //detect landmark points
        List<Vector2> points = faceLandmarkDetector.DetectLandmark(result.rect);
        Debug.Log("face points count : " + points.Count);
        if (points.Count > 0)
        {
            //draw landmark points
            OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
        }

        //draw face rect
        OpenCVForUnityUtils.DrawFaceRect(imgMat, result.rect, new Scalar(255, 0, 0, 255), 2);
    }

    faceLandmarkDetector.Dispose();

    Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);

    OpenCVForUnity.Utils.matToTexture2D(imgMat, texture);

    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}