Example #1
        private void Run()
        {
            // Tracks detected face rectangles and assigns stable ids across frames.
            rectangleTracker = new RectangleTracker();

            // 68-point dlib shape predictor for landmark detection.
            faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);

            // Per-face landmark noise filters, keyed by tracked rectangle id.
            lowPassFilterDict     = new Dictionary<int, LowPassPointsFilter>();
            opticalFlowFilterDict = new Dictionary<int, OFPointsFilter>();

            faceSwapper = new DlibFaceSwapper();
            faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessClone;
            faceSwapper.isShowingDebugFacePoints      = displayDebugFacePoints;

            if (string.IsNullOrEmpty(sourceToMatHelper.requestedVideoFilePath))
            {
                sourceToMatHelper.requestedVideoFilePath = VIDEO_FILENAME;
            }
            sourceToMatHelper.outputColorFormat = VideoCaptureToMatHelper.ColorFormat.RGB;
            sourceToMatHelper.Initialize();

            displayFaceRectsToggle.isOn       = displayFaceRects;
            useDlibFaceDetecterToggle.isOn    = useDlibFaceDetecter;
            enableNoiseFilterToggle.isOn      = enableNoiseFilter;
            filterNonFrontalFacesToggle.isOn  = filterNonFrontalFaces;
            useSeamlessCloneToggle.isOn       = useSeamlessClone;
            displayDebugFacePointsToggle.isOn = displayDebugFacePoints;
        }
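The toggle assignments above push the initial flag values into the UI. In the other direction, a Toggle.onValueChanged callback along these lines keeps the flag and the face swapper in sync when the user changes the toggle (the method name is illustrative, not taken from the sample):

        // Hypothetical Toggle.onValueChanged handler for the seamless-clone option.
        public void OnUseSeamlessCloneToggleValueChanged()
        {
            if (useSeamlessCloneToggle.isOn != useSeamlessClone)
            {
                useSeamlessClone = useSeamlessCloneToggle.isOn;
                faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessClone;
            }
        }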
        private void Run()
        {
            meshOverlay = this.GetComponent<TrackedMeshOverlay>();

            // Cache shader property ids to avoid per-frame string lookups.
            shader_FadeID            = Shader.PropertyToID("_Fade");
            shader_ColorCorrectionID = Shader.PropertyToID("_ColorCorrection");
            shader_LUTTexID          = Shader.PropertyToID("_LUTTex");

            rectangleTracker = new RectangleTracker();

            faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);

            lowPassFilterDict     = new Dictionary<int, LowPassPointsFilter>();
            opticalFlowFilterDict = new Dictionary<int, OFPointsFilter>();

            faceMaskColorCorrector = new FaceMaskColorCorrector();

            if (string.IsNullOrEmpty(sourceToMatHelper.requestedVideoFilePath))
            {
                sourceToMatHelper.requestedVideoFilePath = VIDEO_FILENAME;
            }
            sourceToMatHelper.outputColorFormat = VideoCaptureToMatHelper.ColorFormat.RGB;
            sourceToMatHelper.Initialize();

            displayFaceRectsToggle.isOn       = displayFaceRects;
            useDlibFaceDetecterToggle.isOn    = useDlibFaceDetecter;
            enableNoiseFilterToggle.isOn      = enableNoiseFilter;
            enableColorCorrectionToggle.isOn  = enableColorCorrection;
            filterNonFrontalFacesToggle.isOn  = filterNonFrontalFaces;
            displayDebugFacePointsToggle.isOn = displayDebugFacePoints;
        }
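The three Shader.PropertyToID calls cache integer property IDs so per-frame material updates avoid string lookups. A minimal sketch of how such IDs are typically used (maskMaterial, fade, colorCorrection, and lutTexture are hypothetical names, not from the sample):

        // Hypothetical material update using the cached shader property IDs.
        void ApplyMaskShaderParams(Material maskMaterial, float fade, float colorCorrection, Texture lutTexture)
        {
            maskMaterial.SetFloat(shader_FadeID, fade);
            maskMaterial.SetFloat(shader_ColorCorrectionID, colorCorrection);
            maskMaterial.SetTexture(shader_LUTTexID, lutTexture);
        }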
Example #3
        // Use this for initialization
        void Start()
        {
            fpsMonitor = GetComponent<FpsMonitor>();

            sourceToMatHelper = gameObject.GetComponent<VideoCaptureToMatHelper>();
            if (string.IsNullOrEmpty(sourceToMatHelper.requestedVideoFilePath))
            {
                sourceToMatHelper.requestedVideoFilePath = VIDEO_FILENAME;
            }
            sourceToMatHelper.outputColorFormat = VideoCaptureToMatHelper.ColorFormat.RGB; // the tracking API requires a 3-channel Mat image
            sourceToMatHelper.Initialize();
        }
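The helper initialized in Start() is typically polled each frame. The sketch below assumes VideoCaptureToMatHelper exposes the same IsPlaying/DidUpdateThisFrame/GetMat interface as OpenCVForUnity's other *ToMatHelper classes; treat those three calls as assumptions:

        // Illustrative frame-polling loop (helper interface assumed, see note above).
        void Update()
        {
            if (sourceToMatHelper.IsPlaying() && sourceToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbMat = sourceToMatHelper.GetMat(); // 3-channel RGB, as configured in Start()
                // ... run detection / tracking on rgbMat here ...
            }
        }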
        private void Run()
        {
            if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
            {
                Debug.LogError("Shape predictor file does not exist. Please copy it from the \"DlibFaceLandmarkDetector/StreamingAssets/\" folder to the \"Assets/StreamingAssets/\" folder.");
            }

            // Set 3D face object points (right-handed coordinate system).
            objectPoints68 = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 117), //nose (Tip)
                new Point3(0.0, 32, 97),  //nose (Subnasale)
                new Point3(-79, 90, 10),  //l ear (Bitragion breadth)
                new Point3(79, 90, 10)    //r ear (Bitragion breadth)
                );

            objectPoints17 = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 117), //nose (Tip)
                new Point3(0.0, 32, 97),  //nose (Subnasale)
                new Point3(-79, 90, 10),  //l ear (Bitragion breadth)
                new Point3(79, 90, 10)    //r ear (Bitragion breadth)
                );

            objectPoints6 = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 117), //nose (Tip)
                new Point3(0.0, 32, 97)   //nose (Subnasale)
                );

            objectPoints5 = new MatOfPoint3f(
                new Point3(-23, 90, 83), //l eye (Inner corner of the eye)
                new Point3(23, 90, 83),  //r eye (Inner corner of the eye)
                new Point3(-50, 90, 80), //l eye (Tail of the eye)
                new Point3(50, 90, 80),  //r eye (Tail of the eye)
                new Point3(0.0, 32, 97)  //nose (Subnasale)
                );

            imagePoints = new MatOfPoint2f();


            faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

            if (string.IsNullOrEmpty(sourceToMatHelper.requestedVideoFilePath))
            {
                sourceToMatHelper.requestedVideoFilePath = VIDEO_FILENAME;
            }
            sourceToMatHelper.outputColorFormat = VideoCaptureToMatHelper.ColorFormat.RGB;
            sourceToMatHelper.Initialize();
        }
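A hedged sketch of how these model points typically feed a head-pose estimate via OpenCVForUnity's Calib3d.solvePnP. The EstimateHeadPose name, its parameters, and the intrinsics approximation are illustrative; only objectPoints68 and imagePoints come from the sample above:

        // Illustrative head-pose estimation from the 6 model points above.
        // The 2D Points are assumed to be the pixel positions of the same
        // anatomical landmarks, picked from the dlib 68-point result.
        void EstimateHeadPose(Point lEye, Point rEye, Point noseTip, Point subnasale, Point lEar, Point rEar, float width, float height)
        {
            imagePoints.fromArray(lEye, rEye, noseTip, subnasale, lEar, rEar);

            // Rough pinhole intrinsics: focal length ~ image width, principal point at the frame center.
            Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, width, 0, width / 2f, 0, width, height / 2f, 0, 0, 1.0);
            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);

            Mat rvec = new Mat();
            Mat tvec = new Mat();
            Calib3d.solvePnP(objectPoints68, imagePoints, camMatrix, distCoeffs, rvec, tvec);

            // rvec/tvec are the model-to-camera rotation and translation;
            // Calib3d.Rodrigues(rvec, rotMat) converts rvec to a 3x3 rotation matrix.
        }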
        private void Run()
        {
            if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
            {
                Debug.LogError("Shape predictor file does not exist. Please copy it from the \"DlibFaceLandmarkDetector/StreamingAssets/\" folder to the \"Assets/StreamingAssets/\" folder.");
            }

            faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

            if (string.IsNullOrEmpty(sourceToMatHelper.requestedVideoFilePath))
            {
                sourceToMatHelper.requestedVideoFilePath = VIDEO_FILENAME;
            }
            sourceToMatHelper.outputColorFormat = VideoCaptureToMatHelper.ColorFormat.RGB;
            sourceToMatHelper.Initialize();
        }
        private void Run()
        {
            if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
            {
                Debug.LogError("Shape predictor file does not exist. Please copy it from the \"DlibFaceLandmarkDetector/StreamingAssets/\" folder to the \"Assets/StreamingAssets/\" folder.");
            }

            faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

            // Three alternative landmark-smoothing filters, each sized to the predictor's landmark count.
            lowPassFilter     = new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts());
            kalmanFilter      = new KFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts());
            opticalFlowFilter = new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts());

            if (string.IsNullOrEmpty(sourceToMatHelper.requestedVideoFilePath))
            {
                sourceToMatHelper.requestedVideoFilePath = VIDEO_FILENAME;
            }
            sourceToMatHelper.outputColorFormat = VideoCaptureToMatHelper.ColorFormat.RGB;
            sourceToMatHelper.Initialize();
        }
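A hedged sketch of how one of these filters might be applied per frame. Detect and DetectLandmark follow DlibFaceLandmarkDetector's public API; the OpenCVForUnityUtils.SetImage helper and the filter's Process signature are assumptions based on the asset's example code, so verify them against the installed version:

        // Illustrative per-frame landmark smoothing (Process signature assumed:
        // Process(Mat img, List<Vector2> src, List<Vector2> dst, bool drawDebugPoints)).
        void SmoothLandmarks(Mat rgbMat)
        {
            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat); // assumed helper

            List<UnityEngine.Rect> faceRects = faceLandmarkDetector.Detect();
            if (faceRects.Count == 0)
                return;

            List<Vector2> rawPoints      = faceLandmarkDetector.DetectLandmark(faceRects[0]);
            List<Vector2> smoothedPoints = new List<Vector2>();

            // Any one of the three filters plays the same role; only one is shown here.
            opticalFlowFilter.Process(rgbMat, rawPoints, smoothedPoints, false);
        }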