// Use this for initialization.
protected void Start()
{
    // Cache the helper components attached to this GameObject.
    imageOptimizationHelper = gameObject.GetComponent <ImageOptimizationHelper>();
    webCamTextureToMatHelper = gameObject.GetComponent <HololensCameraStreamToMatHelper>();

#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
    // Subscribe BEFORE Initialize() so the very first acquired frame is not missed.
    webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
#endif
    webCamTextureToMatHelper.Initialize();

    rectangleTracker = new RectangleTracker();

    // Main detector: the model name comes from the example's static selection field.
    dlibShapePredictorFileName = HoloLensWithDlibFaceLandmarkDetectorExample.dlibShapePredictorFileName;
    dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(dlibShapePredictorFileName);
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        // NOTE(review): this only logs — the detector below is still constructed
        // with the empty path; confirm FaceLandmarkDetector tolerates that.
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }
    faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

    // Second detector instance for the worker thread; "sp_human_face_6.dat" is
    // presumably the lightweight 6-landmark model — filename suggests, confirm.
    dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("sp_human_face_6.dat");
    if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
    {
        Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
    }
    faceLandmarkDetector4Thread = new FaceLandmarkDetector(dlibShapePredictorFilePath);

    // Sync the UI toggles with the serialized initial settings.
    useSeparateDetectionToggle.isOn = useSeparateDetection;
    useOpenCVDetectorToggle.isOn = useOpenCVDetector;
    displayCameraImageToggle.isOn = displayCameraImage;
    displayDetectedFaceRectToggle.isOn = displayDetectedFaceRect;
}
// Use this for initialization.
void Start()
{
    // Sync the UI toggles with the serialized initial settings.
    displayCameraPreviewToggle.isOn = displayCameraPreview;
    useSeparateDetectionToggle.isOn = useSeparateDetection;
    displayAxesToggle.isOn = displayAxes;
    displayHeadToggle.isOn = displayHead;
    displayEffectsToggle.isOn = displayEffects;

    // Cache the helper components attached to this GameObject.
    imageOptimizationHelper = gameObject.GetComponent <ImageOptimizationHelper> ();
    webCamTextureToMatHelper = gameObject.GetComponent <HololensCameraStreamToMatHelper> ();

#if NETFX_CORE
    // Subscribe BEFORE Initialize() so the very first acquired frame is not missed.
    webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
#endif
    webCamTextureToMatHelper.Initialize();

    rectangleTracker = new RectangleTracker();
    // Alternative full-size 68-point model:
    // faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("sp_human_face_68.dat"));
    faceLandmarkDetector = new FaceLandmarkDetector(DlibFaceLandmarkDetector.Utils.getFilePath("sp_human_face_68_for_mobile.dat"));

    // The coordinates of the detection object on the real world space connected with the pixel coordinates.(mm)
    objectPoints = new MatOfPoint3f(
        new Point3(-34, 90, 83), //l eye (Interpupillary breadth)
        new Point3(34, 90, 83), //r eye (Interpupillary breadth)
        new Point3(0.0, 50, 120), //nose (Nose top)
        new Point3(-26, 15, 83), //l mouse (Mouth breadth)
        new Point3(26, 15, 83), //r mouse (Mouth breadth)
        new Point3(-79, 90, 0.0), //l ear (Bitragion breadth)
        new Point3(79, 90, 0.0) //r ear (Bitragion breadth)
    );
    // Buffers presumably for head-pose estimation (solvePnP) — confirm in the frame handler.
    imagePoints = new MatOfPoint2f();
    rotMat = new Mat(3, 3, CvType.CV_64FC1);
}
/// <summary>
/// Raises the enable downscale toggle value changed event.
/// </summary>
public void OnEnableDownScaleToggleValueChanged()
{
    enableDownScale = enableDownScaleToggle.isOn;

    // Reinitialize only when the helper is already up and running;
    // the new downscale setting takes effect on the next initialization.
    if (webCamTextureToMatHelper == null)
        return;
    if (!webCamTextureToMatHelper.IsInitialized())
        return;

    webCamTextureToMatHelper.Initialize();
}
// Use this for initialization.
void Start()
{
    // Sync the UI toggle with the serialized initial setting.
    useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;

    // Haar cascade for the OpenCV face-detector path, dlib 68-point predictor for landmarks.
    // NOTE(review): no path-existence check here, unlike other examples in this project.
    cascade = new CascadeClassifier(OpenCVForUnity.Utils.getFilePath("haarcascade_frontalface_alt.xml"));
    faceLandmarkDetector = new FaceLandmarkDetector(DlibFaceLandmarkDetector.Utils.getFilePath("sp_human_face_68.dat"));
    // Alternative lighter mobile model:
    // faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("sp_human_face_68_for_mobile.dat"));

    webCamTextureToMatHelper = gameObject.GetComponent <HololensCameraStreamToMatHelper> ();
#if NETFX_CORE
    // Subscribe BEFORE Initialize() so the very first acquired frame is not missed.
    webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
#endif
    webCamTextureToMatHelper.Initialize();
}
// Use this for initialization.
void Start()
{
    // Sync the UI toggles with the serialized initial settings.
    useSeparateDetectionToggle.isOn = useSeparateDetection;
    displayCameraImageToggle.isOn = displayCameraImage;
    displayDetectedFaceRectToggle.isOn = displayDetectedFaceRect;

    // Cache the helper components attached to this GameObject.
    imageOptimizationHelper = gameObject.GetComponent <ImageOptimizationHelper> ();
    webCamTextureToMatHelper = gameObject.GetComponent <HololensCameraStreamToMatHelper> ();

#if NETFX_CORE
    // Subscribe BEFORE Initialize() so the very first acquired frame is not missed.
    webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
#endif
    webCamTextureToMatHelper.Initialize();

    rectangleTracker = new RectangleTracker();
    // Alternative full-size 68-point model:
    // faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("sp_human_face_68.dat"));
    faceLandmarkDetector = new FaceLandmarkDetector(DlibFaceLandmarkDetector.Utils.getFilePath("sp_human_face_68_for_mobile.dat"));
}
// Use this for initialization.
protected override void Start()
{
    base.Start();

    // Sync the UI toggles with the serialized initial settings.
    displayCameraPreviewToggle.isOn = displayCameraPreview;
    useSeparateDetectionToggle.isOn = useSeparateDetection;
    displayAxesToggle.isOn = displayAxes;
    displayHeadToggle.isOn = displayHead;
    displayEffectsToggle.isOn = displayEffects;
    enableOpticalFlowFilterToggle.isOn = enableOpticalFlowFilter;
    enableLowPassFilterToggle.isOn = enableLowPassFilter;

    // Cache the helper components attached to this GameObject.
    imageOptimizationHelper = gameObject.GetComponent <ImageOptimizationHelper> ();
    webCamTextureToMatHelper = gameObject.GetComponent <HololensCameraStreamToMatHelper> ();

#if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
    // Subscribe BEFORE Initialize() so the very first acquired frame is not missed.
    webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
#endif
    webCamTextureToMatHelper.Initialize();

    rectangleTracker = new RectangleTracker();
    faceLandmarkDetector = new FaceLandmarkDetector(DlibFaceLandmarkDetector.Utils.getFilePath("sp_human_face_68.dat"));
    // Alternative lighter mobile model:
    // faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("sp_human_face_68_for_mobile.dat"));

    // The coordinates of the detection object on the real world space connected with the pixel coordinates.(mm)
    objectPoints = new MatOfPoint3f(
        new Point3(-34, 90, 83), //l eye (Interpupillary breadth)
        new Point3(34, 90, 83), //r eye (Interpupillary breadth)
        new Point3(0.0, 50, 120), //nose (Nose top)
        new Point3(-26, 15, 83), //l mouse (Mouth breadth)
        new Point3(26, 15, 83), //r mouse (Mouth breadth)
        new Point3(-79, 90, 0.0), //l ear (Bitragion breadth)
        new Point3(79, 90, 0.0) //r ear (Bitragion breadth)
    );
    // Buffers presumably for head-pose estimation (solvePnP) — confirm in the frame handler.
    imagePoints = new MatOfPoint2f();
    rotMat = new Mat(3, 3, CvType.CV_64FC1);

    // One filter slot per landmark; diffDlib is divided by the downscale ratio,
    // presumably so the motion threshold matches the reduced image size — confirm.
    opticalFlowFilter = new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts());
    opticalFlowFilter.diffDlib /= imageOptimizationHelper.downscaleRatio;
}
/// <summary>
/// Raises the change camera button click event.
/// </summary>
public void OnChangeCameraButtonClick()
{
    // Reinitialize with the same requested resolution but the opposite
    // camera facing (front <-> rear); null keeps the device name unset.
    var helper = webCamTextureToMatHelper;
    bool flipFacing = !helper.requestedIsFrontFacing;
    helper.Initialize(null, helper.requestedWidth, helper.requestedHeight, flipFacing);
}