private void Run()
{
    // Consistency fix: the sibling Run() variants validate the predictor path before use;
    // this one did not, so a missing asset failed later with a less helpful error.
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }

    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

    // One smoothing filter per landmark point; hoist the part count instead of
    // querying the detector three times.
    int numParts = (int)faceLandmarkDetector.GetShapePredictorNumParts();
    lowPassFilter = new LowPassPointsFilter(numParts);
    kalmanFilter = new KFPointsFilter(numParts);
    opticalFlowFilter = new OFPointsFilter(numParts);

#if UNITY_ANDROID && !UNITY_EDITOR
    // Set the requestedFPS parameter to avoid the problem of the WebCamTexture image becoming
    // low light on some Android devices. (Pixel, Pixel 2)
    // https://forum.unity.com/threads/android-webcamtexture-in-low-light-only-some-models.520656/
    // https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
    rearCameraRequestedFPS = webCamTextureToMatHelper.requestedFPS;
    if (webCamTextureToMatHelper.requestedIsFrontFacing)
    {
        webCamTextureToMatHelper.requestedFPS = 15;
    }
    webCamTextureToMatHelper.Initialize();
#else
    webCamTextureToMatHelper.Initialize();
#endif
}
private void Run()
{
    // Consistency fix: validate the predictor path up front like the other Run() variants.
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }

    cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
#if !UNITY_WSA_10_0
    // Re-enabled the previously commented-out sanity check so a missing cascade asset is
    // reported immediately instead of failing silently later. Guarded with the same
    // #if !UNITY_WSA_10_0 used by the sibling variants in this file.
    if (cascade.empty())
    {
        Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }
#endif

    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

#if UNITY_ANDROID && !UNITY_EDITOR
    // Set the requestedFPS parameter to avoid the problem of the WebCamTexture image becoming
    // low light on some Android devices. (Pixel, Pixel 2)
    // https://forum.unity.com/threads/android-webcamtexture-in-low-light-only-some-models.520656/
    // https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
    rearCameraRequestedFPS = webCamTextureToMatHelper.requestedFPS;
    if (webCamTextureToMatHelper.requestedIsFrontFacing)
    {
        webCamTextureToMatHelper.requestedFPS = 15;
    }
    webCamTextureToMatHelper.Initialize();
#else
    webCamTextureToMatHelper.Initialize();
#endif
}
private void Run()
{
    // Load the dlib shape predictor, then grab the webcam helper component and start it.
    faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);

    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Initialize();
}
private void Run()
{
    // Removed unused locals: a System.Random instance and a randomNumber that were
    // created on every call but never read anywhere in the method.

    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }

    //set 3d face object points.
    objectPoints68 = new MatOfPoint3f(
        new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
        new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
        new Point3(0.0, 50, 117), //nose (Tip)
        new Point3(0.0, 32, 97),  //nose (Subnasale)
        new Point3(-79, 90, 10),  //l ear (Bitragion breadth)
        new Point3(79, 90, 10)    //r ear (Bitragion breadth)
    );
    objectPoints17 = new MatOfPoint3f(
        new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
        new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
        new Point3(0.0, 50, 117), //nose (Tip)
        new Point3(0.0, 32, 97),  //nose (Subnasale)
        new Point3(-79, 90, 10),  //l ear (Bitragion breadth)
        new Point3(79, 90, 10)    //r ear (Bitragion breadth)
    );
    objectPoints6 = new MatOfPoint3f(
        new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
        new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
        new Point3(0.0, 50, 117), //nose (Tip)
        new Point3(0.0, 32, 97)   //nose (Subnasale)
    );
    objectPoints5 = new MatOfPoint3f(
        new Point3(-23, 90, 83),  //l eye (Inner corner of the eye)
        new Point3(23, 90, 83),   //r eye (Inner corner of the eye)
        new Point3(-50, 90, 80),  //l eye (Tail of the eye)
        new Point3(50, 90, 80),   //r eye (Tail of the eye)
        new Point3(0.0, 32, 97)   //nose (Subnasale)
    );

    imagePoints = new MatOfPoint2f();

    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
    webCamTextureToMatHelper.Initialize();
}
private void Run()
{
    // Warn early when the shape predictor asset was not copied into StreamingAssets.
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }

    // Construct the detector from the predictor file, then start the webcam helper.
    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

    webCamTextureToMatHelper.Initialize();
}
private void Run()
{
    // Consistency fix: the sibling Run() variants validate the predictor path before
    // constructing the detector; this one did not.
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }

    //set 3d face object points.
    objectPoints68 = new MatOfPoint3f(
        new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
        new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
        new Point3(0.0, 50, 120), //nose (Nose top)
        new Point3(-26, 15, 83),  //l mouse (Mouth breadth)
        new Point3(26, 15, 83),   //r mouse (Mouth breadth)
        new Point3(-79, 90, 0.0), //l ear (Bitragion breadth)
        new Point3(79, 90, 0.0)   //r ear (Bitragion breadth)
    );
    objectPoints5 = new MatOfPoint3f(
        new Point3(-23, 90, 83),  //l eye (Inner corner of the eye)
        new Point3(23, 90, 83),   //r eye (Inner corner of the eye)
        new Point3(-50, 90, 80),  //l eye (Tail of the eye)
        new Point3(50, 90, 80),   //r eye (Tail of the eye)
        new Point3(0.0, 50, 120)  //nose (Nose top)
    );

    imagePoints = new MatOfPoint2f();

    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

#if UNITY_ANDROID && !UNITY_EDITOR
    // Set the requestedFPS parameter to avoid the problem of the WebCamTexture image becoming
    // low light on some Android devices. (Pixel, Pixel 2)
    // https://forum.unity.com/threads/android-webcamtexture-in-low-light-only-some-models.520656/
    // https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
    rearCameraRequestedFPS = webCamTextureToMatHelper.requestedFPS;
    if (webCamTextureToMatHelper.requestedIsFrontFacing)
    {
        webCamTextureToMatHelper.requestedFPS = 15;
    }
    webCamTextureToMatHelper.Initialize();
#else
    webCamTextureToMatHelper.Initialize();
#endif
}
private void Run()
{
    // Bail out with a clear message when the predictor asset is missing.
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }

    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif

    webCamTextureToMatHelper.Initialize();
}
private void Run()
{
    // Report a missing shape predictor asset before attempting to load it.
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }

    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

    // Each smoothing filter is sized to the predictor's landmark count.
    lowPassFilter = new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts());
    kalmanFilter = new KFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts());
    opticalFlowFilter = new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts());

    webCamTextureToMatHelper.Initialize();
}
private void Run()
{
    // Report a missing shape predictor asset before attempting to load it.
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }

    //set 3d face object points. (right-handed coordinates system)
    objectPoints68 = new MatOfPoint3f(
        new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
        new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
        new Point3(0.0, 50, 117), //nose (Tip)
        new Point3(0.0, 32, 97),  //nose (Subnasale)
        new Point3(-79, 90, 10),  //l ear (Bitragion breadth)
        new Point3(79, 90, 10)    //r ear (Bitragion breadth)
    );
    objectPoints17 = new MatOfPoint3f(
        new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
        new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
        new Point3(0.0, 50, 117), //nose (Tip)
        new Point3(0.0, 32, 97),  //nose (Subnasale)
        new Point3(-79, 90, 10),  //l ear (Bitragion breadth)
        new Point3(79, 90, 10)    //r ear (Bitragion breadth)
    );
    objectPoints6 = new MatOfPoint3f(
        new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
        new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
        new Point3(0.0, 50, 117), //nose (Tip)
        new Point3(0.0, 32, 97)   //nose (Subnasale)
    );
    objectPoints5 = new MatOfPoint3f(
        new Point3(-23, 90, 83),  //l eye (Inner corner of the eye)
        new Point3(23, 90, 83),   //r eye (Inner corner of the eye)
        new Point3(-50, 90, 80),  //l eye (Tail of the eye)
        new Point3(50, 90, 80),   //r eye (Tail of the eye)
        new Point3(0.0, 32, 97)   //nose (Subnasale)
    );

    imagePoints = new MatOfPoint2f();

    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

    webCamTextureToMatHelper.Initialize();
}
private void Run()
{
    // Surface a missing predictor asset as an explicit error.
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }

    cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
#if !UNITY_WSA_10_0
    // NOTE(review): the empty() check is compiled out on UWP — presumably not supported
    // on that platform; confirm before relying on it there.
    if (cascade.empty())
    {
        Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }
#endif

    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

    webCamTextureToMatHelper.Initialize();
}
private void Run()
{
    //set 3d face object points.
    objectPoints = new MatOfPoint3f(
        new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
        new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
        new Point3(0.0, 50, 120), //nose (Nose top)
        new Point3(-26, 15, 83),  //l mouse (Mouth breadth)
        new Point3(26, 15, 83),   //r mouse (Mouth breadth)
        new Point3(-79, 90, 0.0), //l ear (Bitragion breadth)
        new Point3(79, 90, 0.0)   //r ear (Bitragion breadth)
    );
    imagePoints = new MatOfPoint2f();

    // 3x3 double-precision rotation matrix for the pose estimate.
    rotMat = new Mat(3, 3, CvType.CV_64FC1);

    faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);

    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Initialize();
}
private void Run()
{
    // Surface a missing predictor asset as an explicit error.
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }

    cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
#if !UNITY_WSA_10_0
    // NOTE(review): the empty() check is compiled out on UWP — presumably not supported
    // on that platform; confirm before relying on it there.
    if (cascade.empty())
    {
        Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }
#endif

    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif

    webCamTextureToMatHelper.Initialize();
}
/// <summary>
/// Raises the change camera button click event.
/// </summary>
public void OnChangeCameraButtonClick()
{
    // Re-initialize the helper with the same requested resolution but the opposite camera facing.
    bool switchToFront = !webCamTextureToMatHelper.requestedIsFrontFacing;
    webCamTextureToMatHelper.Initialize(null, webCamTextureToMatHelper.requestedWidth, webCamTextureToMatHelper.requestedHeight, switchToFront);
}