// Use this for initialization
        void Start()
        {
            displayCameraPreviewToggle.isOn = displayCameraPreview;
            useSeparateDetectionToggle.isOn = useSeparateDetection;
            displayAxesToggle.isOn          = displayAxes;
            displayHeadToggle.isOn          = displayHead;
            displayEffectsToggle.isOn       = displayEffects;

            imageOptimizationHelper  = gameObject.GetComponent <ImageOptimizationHelper> ();
            webCamTextureToMatHelper = gameObject.GetComponent <HololensCameraStreamToMatHelper> ();
            #if NETFX_CORE
            webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
            #endif
            webCamTextureToMatHelper.Initialize();

            rectangleTracker = new RectangleTracker();
//            faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("sp_human_face_68.dat"));
            faceLandmarkDetector = new FaceLandmarkDetector(DlibFaceLandmarkDetector.Utils.getFilePath("sp_human_face_68_for_mobile.dat"));

            // 3D reference points of the face model in real-world space (mm), paired with the detected 2D pixel coordinates for pose estimation.
            objectPoints = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 120), //nose (Nose top)
                new Point3(-26, 15, 83),  //l mouth (Mouth breadth)
                new Point3(26, 15, 83),   //r mouth (Mouth breadth)
                new Point3(-79, 90, 0.0), //l ear (Bitragion breadth)
                new Point3(79, 90, 0.0)   //r ear (Bitragion breadth)
                );

            imagePoints = new MatOfPoint2f();
            rotMat      = new Mat(3, 3, CvType.CV_64FC1);
        }
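
The objectPoints, imagePoints, and rotMat fields prepared above are the usual inputs and outputs of OpenCV's solvePnP head-pose estimation. The sketch below shows how they might be combined; EstimateHeadPose is a hypothetical helper name, and camMatrix / distCoeffs are assumed to be built from the camera intrinsics elsewhere in the example (the OpenCVForUnity Calib3d bindings are assumed to be in scope; their namespace differs between package versions).

        // Sketch only: estimate the head pose from seven detected 2D landmark
        // positions, ordered the same way as objectPoints (eyes, nose, mouth, ears).
        // camMatrix and distCoeffs are assumed to come from the camera intrinsics.
        void EstimateHeadPose(Point[] sevenImagePoints, Mat camMatrix, MatOfDouble distCoeffs)
        {
            imagePoints.fromArray(sevenImagePoints);

            using (Mat rvec = new Mat(3, 1, CvType.CV_64FC1))
            using (Mat tvec = new Mat(3, 1, CvType.CV_64FC1))
            {
                // Solve the Perspective-n-Point problem: 3D model points -> 2D image points.
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                // Convert the rotation vector into the 3x3 rotation matrix allocated in Start().
                Calib3d.Rodrigues(rvec, rotMat);
            }
        }
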
Example No. 2
        // Use this for initialization
        protected void Start()
        {
            imageOptimizationHelper  = gameObject.GetComponent <ImageOptimizationHelper>();
            webCamTextureToMatHelper = gameObject.GetComponent <HololensCameraStreamToMatHelper>();
#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
            webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
#endif
            webCamTextureToMatHelper.Initialize();

            rectangleTracker = new RectangleTracker();

            dlibShapePredictorFileName = HoloLensWithDlibFaceLandmarkDetectorExample.dlibShapePredictorFileName;
            dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(dlibShapePredictorFileName);
            if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
            {
                Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }
            faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);


            dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("sp_human_face_6.dat");
            if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
            {
                Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }
            faceLandmarkDetector4Thread = new FaceLandmarkDetector(dlibShapePredictorFilePath);


            useSeparateDetectionToggle.isOn    = useSeparateDetection;
            useOpenCVDetectorToggle.isOn       = useOpenCVDetector;
            displayCameraImageToggle.isOn      = displayCameraImage;
            displayDetectedFaceRectToggle.isOn = displayDetectedFaceRect;
        }
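
Two detectors are created above: faceLandmarkDetector with the user-selected shape predictor for the main pass, and faceLandmarkDetector4Thread with the lighter 6-point predictor for the background detection thread. The sketch below shows a single detection pass with either instance; DetectFaceLandmarks is a hypothetical helper name, and OpenCVForUnityUtils.SetImage is assumed to be the Mat-to-dlib bridge helper bundled with the example scenes (System.Collections.Generic and UnityEngine are assumed to be imported).

        // Sketch only: run one detection pass with a given detector on an RGBA Mat.
        List<List<Vector2>> DetectFaceLandmarks(FaceLandmarkDetector detector, Mat rgbaMat)
        {
            // Hand the Mat's pixel data to dlib (helper shipped with the example package).
            OpenCVForUnityUtils.SetImage(detector, rgbaMat);

            var results = new List<List<Vector2>>();
            // Detect() returns face rectangles; DetectLandmark() returns the
            // shape predictor's points for each rectangle.
            foreach (UnityEngine.Rect faceRect in detector.Detect())
            {
                results.Add(detector.DetectLandmark(faceRect));
            }
            return results;
        }
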
Example No. 3
        // Use this for initialization
        void Start()
        {
            useSeparateDetectionToggle.isOn    = useSeparateDetection;
            displayCameraImageToggle.isOn      = displayCameraImage;
            displayDetectedFaceRectToggle.isOn = displayDetectedFaceRect;

            imageOptimizationHelper  = gameObject.GetComponent <ImageOptimizationHelper> ();
            webCamTextureToMatHelper = gameObject.GetComponent <HololensCameraStreamToMatHelper> ();
            #if NETFX_CORE
            webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
            #endif
            webCamTextureToMatHelper.Initialize();

            rectangleTracker = new RectangleTracker();
//            faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("sp_human_face_68.dat"));
            faceLandmarkDetector = new FaceLandmarkDetector(DlibFaceLandmarkDetector.Utils.getFilePath("sp_human_face_68_for_mobile.dat"));
        }
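
On platforms where the frameMatAcquired callback is not wired up (the non-NETFX_CORE branch above), frames are normally polled from the helper each frame. The sketch below illustrates that polling pattern; IsPlaying, DidUpdateThisFrame and GetMat are the helper's standard accessors, while GetDownScaleMat is assumed to be the ImageOptimizationHelper method that returns a downscaled copy.

        // Sketch only: the Editor / non-UWP frame loop.
        void Update()
        {
            if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
                return;

            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            // Detect on a downscaled copy to keep the HoloLens frame rate up; any
            // resulting rectangles/landmarks must be scaled back up by
            // imageOptimizationHelper.downscaleRatio before they are drawn or tracked.
            Mat downScaleMat = imageOptimizationHelper.GetDownScaleMat(rgbaMat);

            // ... run face detection / landmark detection on downScaleMat here ...
        }
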
Example No. 4
        // Use this for initialization
        protected override void Start()
        {
            base.Start();

            displayCameraPreviewToggle.isOn    = displayCameraPreview;
            useSeparateDetectionToggle.isOn    = useSeparateDetection;
            displayAxesToggle.isOn             = displayAxes;
            displayHeadToggle.isOn             = displayHead;
            displayEffectsToggle.isOn          = displayEffects;
            enableOpticalFlowFilterToggle.isOn = enableOpticalFlowFilter;
            enableLowPassFilterToggle.isOn     = enableLowPassFilter;

            imageOptimizationHelper  = gameObject.GetComponent <ImageOptimizationHelper> ();
            webCamTextureToMatHelper = gameObject.GetComponent <HololensCameraStreamToMatHelper> ();
            #if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
            webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
            #endif
            webCamTextureToMatHelper.Initialize();

            rectangleTracker     = new RectangleTracker();
            faceLandmarkDetector = new FaceLandmarkDetector(DlibFaceLandmarkDetector.Utils.getFilePath("sp_human_face_68.dat"));
//            faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("sp_human_face_68_for_mobile.dat"));

            // 3D reference points of the face model in real-world space (mm), paired with the detected 2D pixel coordinates for pose estimation.
            objectPoints = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 120), //nose (Nose top)
                new Point3(-26, 15, 83),  //l mouth (Mouth breadth)
                new Point3(26, 15, 83),   //r mouth (Mouth breadth)
                new Point3(-79, 90, 0.0), //l ear (Bitragion breadth)
                new Point3(79, 90, 0.0)   //r ear (Bitragion breadth)
                );

            imagePoints = new MatOfPoint2f();
            rotMat      = new Mat(3, 3, CvType.CV_64FC1);

            opticalFlowFilter           = new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts());
            opticalFlowFilter.diffDlib /= imageOptimizationHelper.downscaleRatio;
        }
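
Once Calib3d.solvePnP and Calib3d.Rodrigues have filled rotMat (and a translation vector), the pose is typically packed into a Unity Matrix4x4 before the head/axes objects are positioned. The sketch below shows only that packing step; ToPoseMatrix is a hypothetical helper name, and the coordinate-system flip plus the camera-to-world transform that the example applies afterwards are omitted.

        // Sketch only: combine a 3x3 rotation matrix and a 3x1 translation vector
        // (both CV_64FC1, still in OpenCV's camera space) into a 4x4 pose matrix.
        Matrix4x4 ToPoseMatrix(Mat rotation, Mat translation)
        {
            Matrix4x4 pose = new Matrix4x4();
            pose.SetRow(0, new Vector4((float)rotation.get(0, 0)[0], (float)rotation.get(0, 1)[0], (float)rotation.get(0, 2)[0], (float)translation.get(0, 0)[0]));
            pose.SetRow(1, new Vector4((float)rotation.get(1, 0)[0], (float)rotation.get(1, 1)[0], (float)rotation.get(1, 2)[0], (float)translation.get(1, 0)[0]));
            pose.SetRow(2, new Vector4((float)rotation.get(2, 0)[0], (float)rotation.get(2, 1)[0], (float)rotation.get(2, 2)[0], (float)translation.get(2, 0)[0]));
            pose.SetRow(3, new Vector4(0f, 0f, 0f, 1f));
            return pose;
        }
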