Example No. 1
        private static void StartFace()
        {
            pxcmStatus     status     = senseManager.EnableFace();
            PXCMFaceModule faceModule = senseManager.QueryFace();

            if (status != pxcmStatus.PXCM_STATUS_NO_ERROR || faceModule == null)
            {
                return;
            }

            PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();

            if (faceConfig == null)
            {
                return;
            }

            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
            faceConfig.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;

            PXCMFaceConfiguration.ExpressionsConfiguration econfiguration = faceConfig.QueryExpressions();
            if (econfiguration == null)
            {
                return;
            }
            econfiguration.properties.maxTrackedFaces = 1;
            econfiguration.EnableAllExpressions();
            econfiguration.Enable();
            faceConfig.pose.isEnabled      = true;
            faceConfig.landmarks.isEnabled = true;

            // Apply all pending changes (expressions, pose, landmarks) before creating the output
            faceConfig.ApplyChanges();

            faceData = faceModule.CreateOutput();
        }
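This example only configures the module and creates the PXCMFaceData output; it never shows how that output is consumed. Below is a minimal polling sketch, assuming senseManager.Init() has already been called elsewhere and that senseManager and faceData are the same fields used above; the loop and its logging are illustrative, not part of the original project.

        // Hedged sketch: polls frames and reads the bounding rectangle of each detected face.
        private static void PollFaces()
        {
            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                faceData.Update();  // refresh the face output for the current frame

                for (int i = 0; i < faceData.QueryNumberOfDetectedFaces(); i++)
                {
                    PXCMFaceData.Face          face      = faceData.QueryFaceByIndex(i);
                    PXCMFaceData.DetectionData detection = face.QueryDetection();
                    PXCMRectI32 rect;

                    if (detection != null && detection.QueryBoundingRect(out rect))
                    {
                        Console.WriteLine("Face {0}: {1}x{2} at ({3},{4})", i, rect.w, rect.h, rect.x, rect.y);
                    }
                }

                senseManager.ReleaseFrame();  // release before acquiring the next frame
            }
        }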
Example No. 2
        private void startButton_Click(object sender, RoutedEventArgs e)
        {
            CurrentIpAdress = ipTextBox.Text;
            currentPort     = portTextBox.Text;



            senseManager = PXCMSenseManager.CreateInstance();
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60);
            senseManager.EnableFace();
            senseManager.Init();
            faceModule        = senseManager.QueryFace();
            faceConfiguration = faceModule.CreateActiveConfiguration();
            faceConfiguration.detection.isEnabled = true;
            expressionConfiguration = faceConfiguration.QueryExpressions();
            expressionConfiguration.Enable();
            expressionConfiguration.EnableAllExpressions();
            faceConfiguration.landmarks.isEnabled    = true;
            faceConfiguration.landmarks.numLandmarks = 78;
            faceConfiguration.EnableAllAlerts();
            faceConfiguration.ApplyChanges();
            captureProcess = new Thread(new ThreadStart(CaptureProcess));



            captureProcess.Start();
        }
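The handler above hands off to a CaptureProcess thread that is not shown. A plausible minimal body for it, assuming it creates its own PXCMFaceData from the faceModule field and polls expression intensities, could look like the following; the chosen expression and the console output are illustrative only.

        private void CaptureProcess()
        {
            // Assumed worker loop; the real thread may do more (e.g. send results to CurrentIpAdress/currentPort).
            PXCMFaceData faceData = faceModule.CreateOutput();

            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                faceData.Update();

                if (faceData.QueryNumberOfDetectedFaces() > 0)
                {
                    PXCMFaceData.ExpressionsData expressions = faceData.QueryFaceByIndex(0).QueryExpressions();
                    PXCMFaceData.ExpressionsData.FaceExpressionResult score;

                    if (expressions != null &&
                        expressions.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out score))
                    {
                        Console.WriteLine("Mouth open intensity: " + score.intensity);  // 0..100
                    }
                }

                senseManager.ReleaseFrame();
            }

            faceData.Dispose();
        }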
Example No. 3
        /**
         * Constructor of the model.
         * It does all the important setup needed to use our camera. It's so FANCY!
         * It enables the important trackers (Hand, Face) and the stream, and builds up the configuration.
         */
        public Model()
        {
            emotions["Anger"]    = 0;
            emotions["Fear"]     = 0;
            emotions["Disgust"]  = 0;
            emotions["Surprise"] = 0;
            emotions["Joy"]      = 0;
            emotions["Sadness"]  = 0;
            emotions["Contempt"] = 0;
            width        = 1920;
            height       = 1080;
            framerate    = 30;
            senseManager = PXCMSenseManager.CreateInstance();
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, framerate);
            // Enable Face detection
            senseManager.EnableFace();
            senseManager.Init();

            face       = senseManager.QueryFace();
            faceConfig = face.CreateActiveConfiguration();
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
            faceConfig.detection.isEnabled = true;
            faceConfig.pose.isEnabled      = true;
            faceConfig.ApplyChanges();
            faceConfig.Update();


            modules = new List <RSModule>();
        }
Example No. 4
        public override void Start()
        {
            _cancellationToken = new CancellationTokenSource();
            _manager.EnableEmotion();
            _manager.EnableFace();

            using (var faceModule = _manager.QueryFace()) {
                using (var moduleConfiguration = faceModule.CreateActiveConfiguration()) {
                    moduleConfiguration.detection.maxTrackedFaces = 1;
                    var expressionConfig = moduleConfiguration.QueryExpressions();
                    expressionConfig.Enable();
                    expressionConfig.EnableAllExpressions();

                    var desc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
                    desc.maxUsers     = 10;
                    desc.isPersistent = true;
                    var recognitionConfiguration = moduleConfiguration.QueryRecognition();
                    recognitionConfiguration.CreateStorage(StorageName, out desc);
                    recognitionConfiguration.UseStorage(StorageName);
                    recognitionConfiguration.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

                    if (File.Exists(StorageFileName))
                    {
                        var bytes = File.ReadAllBytes(StorageFileName);
                        recognitionConfiguration.SetDatabaseBuffer(bytes);
                    }
                    recognitionConfiguration.Enable();
                    moduleConfiguration.ApplyChanges();
                }
            }

            _manager.EnableHand();
            using (var handModule = _manager.QueryHand()) {
                using (var handConfig = handModule.CreateActiveConfiguration()) {
                    //handConfig.EnableAllAlerts();
                    int numGestures = handConfig.QueryGesturesTotalNumber();
                    for (int i = 0; i < numGestures; i++)
                    {
                        string name;
                        handConfig.QueryGestureNameByIndex(i, out name);
                        Debug.WriteLine("Gestures: " + name);
                    }
                    handConfig.EnableAllGestures();
                    handConfig.SubscribeGesture(OnGesture);
                    handConfig.EnableTrackedJoints(true);
                    handConfig.ApplyChanges();
                }
            }
            Debug.WriteLine("Initializing Camera...");

            var status = _manager.Init();

            if (status != NoError)
            {
                throw new CameraException(status.ToString());
            }
            Task.Factory.StartNew(Loop,
                                  TaskCreationOptions.LongRunning,
                                  _cancellationToken.Token);
        }
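SubscribeGesture(OnGesture) above registers a callback that is not included in the listing. A minimal sketch of such a handler follows; the PXCMHandData.GestureData parameter and its name field follow the SDK's fired-gesture delegate, but the body is an assumption.

        // Hedged sketch of the gesture callback registered via SubscribeGesture above.
        private void OnGesture(PXCMHandData.GestureData data)
        {
            Debug.WriteLine("Gesture fired: " + data.name);
        }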
Example No. 5
        static void Main(string[] args)
        {
            try {
                // Create the SenseManager
                senseManager = PXCMSenseManager.CreateInstance();
                if (senseManager == null)
                {
                    throw new Exception("SenseManagerの生成に失敗しました");
                }

                // Init fails unless at least one module is enabled, so enable one arbitrarily
                senseManager.EnableFace();


                // Initialize the pipeline
                var sts = senseManager.Init();
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    throw new Exception("パイプラインの初期化に失敗しました");
                }

                // Enumerate the available devices
                enumDevice();
            }
            catch (Exception ex) {
                Console.WriteLine(ex.Message);
            }
        }
Example No. 6
        private void InitializeFace()
        {
            // Enable face detection
            var sts = senceManager.EnableFace();

            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("顔検出の有効化に失敗しました");
            }

            // Get the face detection module
            var faceModule = senceManager.QueryFace();

            // Get the face detection configuration
            PXCMFaceConfiguration config = faceModule.CreateActiveConfiguration();

            config.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);


            config.ApplyChanges();

            // Initialize the pipeline
            pxcmStatus ret = senceManager.Init();

            if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("初期化に失敗しました");
            }

            // Get the device information
            PXCMCapture.Device device = senceManager.QueryCaptureManager().QueryDevice();
            if (device == null)
            {
                throw new Exception("deviceの生成に失敗しました");
            }


            // Mirror the display
            device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            PXCMCapture.DeviceInfo deviceInfo;
            device.QueryDeviceInfo(out deviceInfo);
            if (deviceInfo.model == PXCMCapture.DeviceModel.DEVICE_MODEL_IVCAM)
            {
                device.SetDepthConfidenceThreshold(1);
                device.SetIVCAMFilterOption(6);
                device.SetIVCAMMotionRangeTradeOff(21);
            }


            config.detection.isEnabled       = true;
            config.detection.maxTrackedFaces = DETECTION_MAXFACES;
            config.QueryExpressions().Enable();                                         // Added: enable facial expression output
            config.QueryExpressions().EnableAllExpressions();                           // Added: make all expression data available
            config.QueryExpressions().properties.maxTrackedFaces = EXPRESSION_MAXFACES; // Added: limit expression tracking to EXPRESSION_MAXFACES faces
            config.ApplyChanges();

            faceData = faceModule.CreateOutput();
        }
Example No. 7
        /// <summary>
        /// Starts this instance.
        /// </summary>
        public void Start()
        {
            if (_sm != null)
            {
                throw new ResearchException("Camera is already started.");
            }

            _sm = PXCMSenseManager.CreateInstance();

            // Configure face detection.
            if (EnableFace)
            {
                _sm.EnableFace();
                var faceModule = _sm.QueryFace();
                using (PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration())
                {
                    faceConfig.EnableAllAlerts();
                    faceConfig.pose.isEnabled       = true;
                    faceConfig.pose.maxTrackedFaces = 4;

                    if (EnableExpression)
                    {
                        PXCMFaceConfiguration.ExpressionsConfiguration expression = faceConfig.QueryExpressions();
                        expression.Enable();
                        expression.EnableAllExpressions();
                    }

                    // Apply the alert, pose, and (optional) expression settings in all cases
                    faceConfig.ApplyChanges();
                }
            }

            if (EnableEmotion)
            {
                // Configure emotion detection.
                _sm.EnableEmotion();
            }

            if (EnableStreaming)
            {
                // Configure streaming.
                _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);
                //    _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);
                //    _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, 640, 480);
            }

            // Event handler for data callbacks.
            var handler = new PXCMSenseManager.Handler {
                onModuleProcessedFrame = OnModuleProcessedFrame
            };

            _sm.Init(handler);

            // GO.
            Debug.WriteLine("{0} Starting streaming.", Time());
            _sm.StreamFrames(false);



            //Debug.WriteLine("{0} End streaming.", Time());
        }
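The handler object above routes per-frame results to OnModuleProcessedFrame, which is not shown. A sketch of the expected callback shape follows; apart from the delegate signature itself, the filtering on PXCMFaceModule.CUID and the body are assumptions.

        // Hedged sketch: called by the SDK once per processed frame per module.
        // Returning an error status stops streaming.
        private pxcmStatus OnModuleProcessedFrame(int mid, PXCMBase module, PXCMCapture.Sample sample)
        {
            if (mid == PXCMFaceModule.CUID)
            {
                // Query face results here, e.g. from a PXCMFaceData created for the face module.
            }

            return pxcmStatus.PXCM_STATUS_NO_ERROR;  // keep streaming
        }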
Example No. 8
        private void ConfigureRealSense()
        {
            try
            {
                // Create the SenseManager instance
                sm = PXCMSenseManager.CreateInstance();

                // Enable the color stream
                sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, ImageWidth, ImageHeight, 30);

                // Enable person tracking
                sm.EnablePersonTracking();
                personModule = sm.QueryPersonTracking();
                PXCMPersonTrackingConfiguration personConfig = personModule.QueryConfiguration();
                personConfig.SetTrackedAngles(PXCMPersonTrackingConfiguration.TrackingAngles.TRACKING_ANGLES_ALL);

                // Enable skeleton tracking - not supported on r200?
                //PXCMPersonTrackingConfiguration.SkeletonJointsConfiguration skeletonConfig = personConfig.QuerySkeletonJoints();
                //skeletonConfig.Enable();

                // Enable the face module
                sm.EnableFace();
                PXCMFaceModule        faceModule = sm.QueryFace();
                PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();
                faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
                faceConfig.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_APPEARANCE_TIME;
                faceConfig.detection.maxTrackedFaces = 1;
                faceConfig.ApplyChanges();

                sm.EnableBlob();
                PXCMBlobModule        blobModule = sm.QueryBlob();
                PXCMBlobConfiguration blobConfig = blobModule.CreateActiveConfiguration();
                blobConfig.SetMaxBlobs(4);       // 4 is the max
                blobConfig.SetMaxDistance(2000); // in mm
                blobConfig.ApplyChanges();

                //initialize the SenseManager
                sm.Init();
                faceData = faceModule.CreateOutput();
                blobData = blobModule.CreateOutput();

                // Mirror the image
                sm.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

                // Release resources
                personConfig.Dispose();
                faceConfig.Dispose();
                faceModule.Dispose();
                blobConfig.Dispose();
                blobModule.Dispose();
            }
            catch (Exception)
            {
                MessageBox.Show("Unable to configure the RealSense camera. Please make sure a R200 camera is connected.", "System Error");
                throw;
            }
        }
Example No. 9
        private void ConfigureRealSense(PXCMSession session)
        {
            PXCMFaceModule        faceModule;
            PXCMFaceConfiguration faceConfig;

            // Start the SenseManager and session
            senseManager = session.CreateSenseManager();

            // Enable the color stream
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);

            // Enable the face module
            senseManager.EnableFace();
            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            // Enable landmarks detection
            faceConfig.landmarks.isEnabled = true;

            // Enable pose detection
            faceConfig.pose.isEnabled = true;

            // Enable facial recognition
            recogConfig = faceConfig.QueryRecognition();
            recogConfig.Enable();

            // Create a recognition database storage
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            recognitionDesc.maxUsers = MAX_USERS;
            recogConfig.CreateStorage(DB_NAME, out recognitionDesc);
            recogConfig.UseStorage(DB_NAME);
            isDBLoaded = LoadDB();
            recogConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            //IVCam Setting
            senseManager.QueryCaptureManager().QueryDevice().SetIVCAMFilterOption(7);
            senseManager.QueryCaptureManager().QueryDevice().SetIVCAMAccuracy(PXCMCapture.Device.IVCAMAccuracy.IVCAM_ACCURACY_COARSE);
            senseManager.QueryCaptureManager().QueryDevice().SetIVCAMLaserPower(16);
            senseManager.QueryCaptureManager().QueryDevice().SetIVCAMMotionRangeTradeOff(100);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
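With recognition enabled and the storage configured as above, each tracked face can be matched against the database after faceData.Update() inside the acquisition loop. The fragment below is a sketch only, assuming it runs inside such a loop; IsRegistered(), QueryUserID() and RegisterUser() come from PXCMFaceData.RecognitionData.

            // Hedged sketch: identify or enroll the first detected face (run after faceData.Update()).
            PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
            if (face != null)
            {
                PXCMFaceData.RecognitionData recognition = face.QueryRecognition();
                if (recognition.IsRegistered())
                {
                    int userId = recognition.QueryUserID();   // previously enrolled user
                }
                else
                {
                    int newId = recognition.RegisterUser();   // enroll the current face
                }
            }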
Example No. 10
        // Use this for initialization
        void Start()
        {
            // Creates an instance of the sense manager to be called later
            session = PXCMSenseManager.CreateInstance();

            //Output an error if there is no instance of the sense manager
            if (session == null)
            {
                Debug.LogError("SenseManager Init Failed!");
            }

            // Enables hand tracking
            sts = session.EnableHand();
            handAnalyzer = session.QueryHand();

            sts2 = session.EnableFace();
            faceAnalyzer = session.QueryFace();

            if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
                Debug.LogError("PXCSenseManager.EnableHand: " + sts);

            if (sts2 != pxcmStatus.PXCM_STATUS_NO_ERROR)
                Debug.LogError("PXCSenseManager.EnableFace: " + sts2);

            // Initialize the pipeline (a single Init() call covers both modules)
            sts = session.Init();

            if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
                Debug.LogError("PXCSenseManager.Init: " + sts);

            // Creates a hand config for future data
            PXCMHandConfiguration handconfig = handAnalyzer.CreateActiveConfiguration();

            PXCMFaceConfiguration faceconfig = faceAnalyzer.CreateActiveConfiguration();

            //If there is handconfig instance
            if (handconfig != null)
            {
                handconfig.EnableAllAlerts();
                handconfig.ApplyChanges();
                handconfig.Dispose();
            }
            if (faceconfig != null)
            {
                faceconfig.EnableAllAlerts();
                faceconfig.ApplyChanges();
                faceconfig.Dispose();
            }
        }
Example No. 11
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            this.DataContext = this;

            SenseManager = PXCMSenseManager.CreateInstance();

            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720);
            SenseManager.EnableFace();

            InitializeCamera();
            ConfigurePollingTask();
        }
        private void ConfigureRealSense()
        {
            PXCMFaceModule        faceModule;
            PXCMFaceConfiguration faceConfig;

            // Start the SenseManager and session
            senseManager = PXCMSenseManager.CreateInstance();

            // Enable the color stream
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
            //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 550, 550, 30);

            // Enable the face module
            senseManager.EnableFace();
            //senseManager.EnableHand();

            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            expressionConfiguration = faceConfig.QueryExpressions();
            expressionConfiguration.Enable();
            expressionConfiguration.EnableAllExpressions();


            // Enable facial recognition
            recognitionConfig = faceConfig.QueryRecognition();
            recognitionConfig.Enable();

            // Create a recognition database
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            recognitionDesc.maxUsers = DatabaseUsers;
            recognitionConfig.CreateStorage(DatabaseName, out recognitionDesc);
            recognitionConfig.UseStorage(DatabaseName);
            LoadDatabaseFromFile();
            recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
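LoadDatabaseFromFile() above is not shown. Given that RecognitionConfiguration.SetDatabaseBuffer accepts a raw byte array (as in Example No. 4), a minimal implementation could read a previously saved buffer back from disk; the file name used here is hypothetical.

        private void LoadDatabaseFromFile()
        {
            const string databaseFile = "face_database.bin";   // hypothetical path

            if (File.Exists(databaseFile))
            {
                byte[] buffer = File.ReadAllBytes(databaseFile);
                recognitionConfig.SetDatabaseBuffer(buffer);    // restore previously registered users
            }
        }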
Example No. 13
    private PXCMFaceModule faceAnalyzer; //FaceModule Instance


    /// <summary>
    /// Use this for initialization
    /// Unity function called on the frame when a script is enabled
    /// just before any of the Update methods is called the first time.
    /// </summary>
    void Start()
    {
        faceRenderer = gameObject.GetComponent <FaceRenderer>();

        /* Initialize a PXCMSenseManager instance */
        psm = PXCMSenseManager.CreateInstance();
        if (psm == null)
        {
            Debug.LogError("SenseManager Initialization Failed");
            return;
        }

        /* Enable the color stream of size 640x480 */
        psm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);

        /* Enable the face tracking module*/
        sts = psm.EnableFace();
        if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("PXCSenseManager.EnableFace: " + sts);
        }

        /* Retrieve an instance of face to configure */
        faceAnalyzer = psm.QueryFace();
        if (faceAnalyzer == null)
        {
            Debug.LogError("PXCSenseManager.QueryFace");
        }

        /* Initialize the execution pipeline */
        sts = psm.Init();
        if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("PXCMSenseManager.Init Failed");
            OnDisable();
            return;
        }

        /* Retrieve a PXCMFaceConfiguration instance from a face to enable Gestures and Alerts */
        PXCMFaceConfiguration config = faceAnalyzer.CreateActiveConfiguration();

        config.detection.isEnabled = true;         // 3D detection is the default tracking mode.
        config.landmarks.isEnabled = true;
        config.pose.isEnabled      = true;
        config.QueryExpressions().Enable();
        config.QueryExpressions().EnableExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN);
        config.EnableAllAlerts();
        config.ApplyChanges();
        config.Dispose();
    }
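Start() above only configures the face module. To actually read pose angles each frame, a companion Update() along these lines would be needed; it assumes a faceData field created once from faceAnalyzer.CreateOutput() and disposed in OnDisable(), and the logging is illustrative.

    // Hedged sketch of a per-frame reader for the configuration above.
    void Update()
    {
        if (psm == null || psm.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            return;
        }

        faceData.Update();
        if (faceData.QueryNumberOfDetectedFaces() > 0)
        {
            PXCMFaceData.PoseData pose = faceData.QueryFaceByIndex(0).QueryPose();
            PXCMFaceData.PoseEulerAngles angles;

            if (pose != null && pose.QueryPoseAngles(out angles))
            {
                Debug.Log("Yaw " + angles.yaw + " Pitch " + angles.pitch + " Roll " + angles.roll);
            }
        }

        psm.ReleaseFrame();
    }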
Example No. 14
        private void StartRealSense()
        {
            bool useHead = bool.Parse(ConfigurationManager.AppSettings["UseHead"]);

            // Instantiate and initialize the SenseManager
            senseManager = PXCMSenseManager.CreateInstance();

            // Configure either the Face module (head tracking) or the Hand cursor module
            if (useHead)
            {
                senseManager.EnableFace();

                face       = senseManager.QueryFace();
                faceConfig = face.CreateActiveConfiguration();
                faceConfig.detection.isEnabled = true;
                faceConfig.QueryExpressions().Enable();

                faceConfig.ApplyChanges();
            }
            else
            {
                // Enable cursor tracking
                senseManager.EnableHand();

                // Get an instance of the hand cursor module
                hand = senseManager.QueryHand();

                // Get an instance of the cursor configuration
                var cursorConfig = hand.CreateActiveConfiguration();

                // Make configuration changes and apply them
                cursorConfig.DisableAllAlerts();
                cursorConfig.EnableTrackedJoints(true);
                cursorConfig.EnableStabilizer(true);
                cursorConfig.ApplyChanges();
            }

            senseManager.Init();

            // Create an instance of PXCMSmoother
            senseManager.session.CreateImpl <PXCMSmoother>(out smoother);
            smoother2D  = smoother.Create2DQuadratic(.5F);
            smoother2D2 = smoother.Create2DQuadratic(1);

            // Start the worker thread
            processingThread = new Thread(new ThreadStart(ProcessingThread));
            processingThread.Start();
        }
Example No. 15
        private void ConfigureRealSense()
        {
            my.init();
            child.Start();
            try
            {
                // Create the SenseManager instance
                sm = PXCMSenseManager.CreateInstance();

                // Enable the color stream
                sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, ImageWidth, ImageHeight, 30);

                // Enable person tracking
                sm.EnablePersonTracking();
                personModule = sm.QueryPersonTracking();
                PXCMPersonTrackingConfiguration personConfig = personModule.QueryConfiguration();
                personConfig.SetTrackedAngles(PXCMPersonTrackingConfiguration.TrackingAngles.TRACKING_ANGLES_ALL);

                // Enable the face module
                sm.EnableFace();
                PXCMFaceModule        faceModule = sm.QueryFace();
                PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();
                faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
                faceConfig.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_APPEARANCE_TIME;
                faceConfig.detection.maxTrackedFaces = 1;

                // Apply changes and initialize the SenseManager
                faceConfig.ApplyChanges();
                sm.Init();
                faceData = faceModule.CreateOutput();

                // Mirror the image
                sm.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

                // Release resources
                personConfig.Dispose();
                faceConfig.Dispose();
                faceModule.Dispose();
            }
            catch (Exception)
            {
                // For the sake of brevity we're not doing extensive exception handling in this code sample,
                // simply give a hint that the camera is not connected, and then throw the exception
                MessageBox.Show("Unable to configure the RealSense camera. Please make sure a R200 camera is connected.", "System Error");
                throw;
            }
        }
Example No. 16
        /**
         * Initialise View and start updater Thread
         */
        public CameraView(int width, int height, int framerate, List <RSModule> mods)
        {
            // Initialise Stuff, turn Camera on
            senseManager = PXCMSenseManager.CreateInstance();
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, framerate);
            // Enable Face detection
            senseManager.EnableFace();
            modules = mods;
            modules.ForEach(delegate(RSModule rsm)
            {
                rsm.Init(senseManager);
            });
            senseManager.Init();

            session = PXCMSession.CreateInstance();

            if (session == null) // Something went wrong, session could not be initialised
            {
                Console.WriteLine("F**k!");
                Application.Exit();
                return;
            }

            iv = session.QueryVersion();
            String versionString = "v" + iv.major + "." + iv.minor;

            Console.WriteLine(versionString);
            Text = versionString;


            pb = new PictureBox();
            // Set size
            pb.Bounds = new Rectangle(0, 0, width, height);
            // init UI
            this.Bounds = new Rectangle(0, 0, width, height);
            this.Controls.Add(pb);
            FormClosed += new FormClosedEventHandler(Quit);
            this.Show();
            // Start Updater Thread
            updaterThread = new Thread(this.update);
            updaterThread.Start();
        }
Example No. 17
        public Model()
        {
            width        = 640;
            height       = 480;
            framerate    = 30;
            senseManager = PXCMSenseManager.CreateInstance();
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, framerate);
            // Enable Face detection
            senseManager.EnableFace();
            senseManager.EnableHand();
            senseManager.Init();

            face       = senseManager.QueryFace();
            faceConfig = face.CreateActiveConfiguration();
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR);
            faceConfig.detection.isEnabled = true;
            PXCMFaceConfiguration.ExpressionsConfiguration expc = faceConfig.QueryExpressions();
            expc.Enable();
            expc.EnableAllExpressions();
            faceConfig.ApplyChanges();
            faceConfig.Update();

            //faceData = face.CreateOutput();
            //faceData.Update();

            hand = senseManager.QueryHand();
            PXCMHandConfiguration config = hand.CreateActiveConfiguration();

            config.SetTrackingMode(PXCMHandData.TrackingModeType.TRACKING_MODE_FULL_HAND);
            config.ApplyChanges();
            config.Update();
            //handData = hand.CreateOutput();
            //handData.Update();

            modules = new List <RSModule>();
        }
Example No. 18
        public FaceTrackerThread()
        {
            running = true;

            senseManager = PXCMSenseManager.CreateInstance();
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60);
            senseManager.EnableFace();
            senseManager.Init();
            face = senseManager.QueryFace();
            faceConfiguration = face.CreateActiveConfiguration();
            faceConfiguration.detection.isEnabled = true;

            expressionConfiguration = faceConfiguration.QueryExpressions();
            expressionConfiguration.Enable();
            expressionConfiguration.EnableAllExpressions();

            //Gaze detection
            gazec = faceConfiguration.QueryGaze();
            gazec.isEnabled = true;
            faceConfiguration.ApplyChanges();

            faceConfiguration.EnableAllAlerts();
            faceConfiguration.ApplyChanges();
        }
Example No. 19
        private void ConfigureRealSense()
        {
            PXCMFaceModule        faceModule;
            PXCMFaceConfiguration faceConfig;

            // Start the SenseManager and session
            senseManager = PXCMSenseManager.CreateInstance();

            // Enable the color stream
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);

            // Enable the face module
            senseManager.EnableFace();
            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for IR face tracking
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR);

            // Enable facial recognition
            recognitionConfig = faceConfig.QueryRecognition();
            recognitionConfig.Enable();


            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
Example No. 20
        /**
         * Constructor of the model.
         * It does all the important setup needed to use our camera. It's so FANCY!
         * It enables the important trackers (Hand, Face) and the stream, and builds up the configuration.
         */
        public Model(bool s)
        {
            stream = s;
            emotions[Emotion.ANGER]    = 0;
            emotions[Emotion.CONTEMPT] = 0;
            emotions[Emotion.DISGUST]  = 0;
            emotions[Emotion.FEAR]     = 0;
            emotions[Emotion.JOY]      = 0;
            emotions[Emotion.SADNESS]  = 0;
            emotions[Emotion.SURPRISE] = 0;

            if (stream)
            {
                width        = 1920;
                height       = 1080;
                framerate    = 30;
                senseManager = PXCMSenseManager.CreateInstance();
                senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, framerate);
                // Enable Face detection
                senseManager.EnableFace();
                bool couldInit = senseManager.Init().IsSuccessful();
                if (!couldInit)
                {
                    MessageBox.Show("Could not connect to the hardware! Make sure you have the camera-drivers installed.", "Aww, dang");
                    Environment.Exit(1);
                }
                face       = senseManager.QueryFace();
                faceConfig = face.CreateActiveConfiguration();
                faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
                faceConfig.detection.isEnabled = true;
                faceConfig.pose.isEnabled      = true;
                faceConfig.ApplyChanges();
                faceConfig.Update();
                modules = new List <RSModule>();
            }
        }
Example No. 21
        static void Main( string[] args )
        {
            try {
                // Create the SenseManager
                senseManager = PXCMSenseManager.CreateInstance();
                if ( senseManager == null ) {
                    throw new Exception( "SenseManagerの生成に失敗しました" );
                }

                // Init fails unless at least one module is enabled, so enable one arbitrarily
                senseManager.EnableFace();

                // Initialize the pipeline
                var sts = senseManager.Init();
                if ( sts < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
                    throw new Exception( "パイプラインの初期化に失敗しました" );
                }

                // Enumerate the available devices
                enumDevice();

            }
            catch ( Exception ex ) {
                Console.WriteLine( ex.Message );
            }
        }
Example No. 22
        private void ConfigureRealSense()
        {
            PXCMFaceModule faceModule;



            // Start the SenseManager and session
            senseManager = PXCMSenseManager.CreateInstance();
            senseManager.captureManager.SetFileName("recorded_video.wm", true);
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 0);

            senseManager.captureManager.SetRealtime(false);
            senseManager.captureManager.SetPause(true);

            // Enable the color stream
            //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60);
            // 60 or 0 for fps?

            // Enable the face module
            senseManager.EnableFace();
            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();
            faceConfig.detection.isEnabled = true;
            expressionConfig = faceConfig.QueryExpressions();
            expressionConfig.Enable();
            expressionConfig.EnableAllExpressions();
            faceConfig.EnableAllAlerts();



            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            // Enable facial recognition
            recognitionConfig = faceConfig.QueryRecognition();
            recognitionConfig.Enable();


            // Create a recognition database
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            recognitionDesc.maxUsers = DatabaseUsers;
            recognitionConfig.CreateStorage(DatabaseName, out recognitionDesc);
            recognitionConfig.UseStorage(DatabaseName);
            LoadDatabaseFromFile();
            recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();
            int numFaces = faceData.QueryNumberOfDetectedFaces();

            Console.WriteLine("number of detected faces", numFaces);

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
Example No. 23
        public MainWindow()
        {
            InitializeComponent();

            //set the current date and time
            currentDateTime = DateTime.Now.ToString("yyyyMMddHHmmssfff");
            //set total timer count to 0 and init vars
            highPerformanceTimer = new HiPerfTimer();
            totalHighPerfTimeElapsed = 0;
            numLinesWritten = 0; //set the total number of lines written to 0 so we can track when to start the timer

            //init pipe stuff
            pipeClient = new MyClient(PIPE_NAME);
            pipeClient.SendMessage("I Am Intel RealSense");

            //Debug.WriteLine("Server Ready");

            //initialise combobox
            populateComboBox();
            //init the exprToDisplay global var
            exprToDisplay = "";

            //Work on the file

            //create paths
            string dirToCreate = "data";
            string dirToCreateFull = System.IO.Path.GetFullPath(dirToCreate);
            Directory.CreateDirectory(dirToCreateFull);

            dirToCreate = "video";
            dirToCreateFull = System.IO.Path.GetFullPath(dirToCreate);
            Directory.CreateDirectory(dirToCreateFull);

            //create the csv file to write to
            file = new StreamWriter("data/" + currentDateTime + "data" + ".csv");

            //initialise global expressions array - faster to add the keys here?
            var enumListMain = Enum.GetNames(typeof(PXCMFaceData.ExpressionsData.FaceExpression));
            exprTable = new Hashtable();
            string initLine = "";

            //Add the column schema

            //Initial line: timestamp and high prec time
            initLine += "TIMESTAMP,HIGH_PRECISION_TIME_FROM_START,STIMCODE";

            //add all the expression data columns
            for (int i = 0; i < enumListMain.Length; i++)
            {
                exprTable.Add(enumListMain[i], 0);
                initLine += "," + enumListMain[i];

            }

            //add the bounding rectangle column
            initLine += "," + "BOUNDING_RECTANGLE_HEIGHT" + "," + "BOUNDING_RECTANGLE_WIDTH" + "," + "BOUNDING_RECTANGLE_X" + "," + "BOUNDING_RECTANGLE_Y";
            //add the average depth column
            initLine += "," + "AVERAGE_DEPTH";
            //add landmark points column
            for (int i = 0; i < LANDMARK_POINTS_TOTAL; i++)
            {
                initLine += "," + "LANDMARK_" + i + "_X";
                initLine += "," + "LANDMARK_" + i + "_Y";
            }
            //add euler angles columns
            initLine += "," + "EULER_ANGLE_PITCH" + "," + "EULER_ANGLE_ROLL" + "," + "EULER_ANGLE_YAW";
            initLine += "," + "QUATERNION_W" + "," + "QUATERNION_X" + "," + "QUATERNION_Y" + "," + "QUATERNION_Z";

            //write the initial row to the file
            file.WriteLine(initLine);

            //configure the camera mode selection box
            cbCameraMode.Items.Add("Color");
            cbCameraMode.Items.Add("IR");
            cbCameraMode.Items.Add("Depth");
            //configure initial camera mode
            cameraMode = "Color";

            //initialise global vars

            numFacesDetected = 0;

            handWaving = false;
            handTrigger = false;
            handResetTimer = 0;

            lEyeClosedIntensity = 0;
            lEyeClosed = false;
            lEyeClosedTrigger = false;
            lEyeClosedResetTimer = 0;

            rEyeClosed = false;
            rEyeClosedTrigger = false;
            rEyeClosedResetTimer = 0;
            rEyeClosedIntensity = 0;

            emotionEvidence = 0;

            blinkTrigger = false;
            blinkResetTimer = 0;

            //global fps vars
            prevTime = 0;
            stopwatch = new Stopwatch();

            // Instantiate and initialize the SenseManager
            senseManager = PXCMSenseManager.CreateInstance();
            if (senseManager == null)
            {
                MessageBox.Show("Cannot initialise sense manager: closing in 20s, report to Sriram");
                Thread.Sleep(20000);
                Environment.Exit(1);
            }

            //capture samples
            senseManager.captureManager.SetFileName("video/" + currentDateTime + ".raw", true);
            //Enable color stream
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, STREAM_WIDTH, STREAM_HEIGHT, STREAM_FPS);
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, STREAM_WIDTH, STREAM_HEIGHT, STREAM_FPS);
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, STREAM_WIDTH, STREAM_HEIGHT, STREAM_FPS);
            //Enable face and hand tracking AND EMOTION TRACKING
            senseManager.EnableHand();
            senseManager.EnableFace();
            senseManager.EnableEmotion();

            //Initialise the senseManager - begin collecting data
            senseManager.Init();

            // Configure the Hand Module
            hand = senseManager.QueryHand();
            handConfig = hand.CreateActiveConfiguration();
            handConfig.EnableGesture("wave");
            handConfig.EnableAllAlerts();
            handConfig.ApplyChanges();

            //Configure the Face Module
            face = senseManager.QueryFace();
            faceConfig = face.CreateActiveConfiguration();
            faceConfig.EnableAllAlerts();
            faceConfig.detection.isEnabled = true; //enables querydetection function to retrieve face loc data
            faceConfig.detection.maxTrackedFaces = 1; //MAXIMUM TRACKING - 1 FACE
            faceConfig.ApplyChanges();
            //Configure the sub-face-module Expressions
            exprConfig = faceConfig.QueryExpressions();
            exprConfig.Enable();
            exprConfig.EnableAllExpressions();
            faceConfig.ApplyChanges();

            // Start the worker thread that processes the captured data in real-time
            processingThread = new Thread(new ThreadStart(ProcessingThread));
            processingThread.Start();
        }
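The ProcessingThread launched at the end of the constructor is not part of this listing. A compressed sketch of the shape such a thread could take is given below; it assumes the thread creates its own face and hand outputs, and it leaves out the CSV writing, hand-wave and eye-blink logic that the constructor's fields imply.

        private void ProcessingThread()
        {
            // Hedged sketch only; the real thread also appends one CSV row per frame via 'file'.
            PXCMFaceData faceData = face.CreateOutput();
            PXCMHandData handData = hand.CreateOutput();

            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                faceData.Update();
                handData.Update();

                numFacesDetected = faceData.QueryNumberOfDetectedFaces();
                if (numFacesDetected > 0)
                {
                    // read expressions, bounding rectangle, landmarks and pose here,
                    // then write the assembled row to the CSV output
                }

                senseManager.ReleaseFrame();
            }

            faceData.Dispose();
            handData.Dispose();
        }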
Example No. 24
        public void SimplePipeline()
        {
            PXCMSenseManager pp = m_form.Session.CreateSenseManager();

            if (pp == null)
            {
                throw new Exception("PXCMSenseManager null");
            }

            PXCMCaptureManager captureMgr = pp.captureManager;

            if (captureMgr == null)
            {
                throw new Exception("PXCMCaptureManager null");
            }

            var selectedRes = m_form.GetCheckedColorResolution();

            if (selectedRes != null && !m_form.IsInPlaybackState())
            {
                // Set active camera
                PXCMCapture.DeviceInfo deviceInfo;
                m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out deviceInfo);
                captureMgr.FilterByDeviceInfo(m_form.GetCheckedDeviceInfo());

                // activate filter only live/record mode , no need in playback mode
                var set = new PXCMCapture.Device.StreamProfileSet
                {
                    color =
                    {
                        frameRate = selectedRes.Item2,
                        imageInfo =
                        {
                            format = selectedRes.Item1.format,
                            height = selectedRes.Item1.height,
                            width  = selectedRes.Item1.width
                        }
                    }
                };

                if (m_form.IsPulseEnabled() && (set.color.imageInfo.width < 1280 || set.color.imageInfo.height < 720))
                {
                    captureMgr.FilterByStreamProfiles(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720, 0);
                }
                else
                {
                    captureMgr.FilterByStreamProfiles(set);
                }
            }

            // Set Source & Landmark Profile Index
            if (m_form.IsInPlaybackState())
            {
                //pp.captureManager.FilterByStreamProfiles(null);
                captureMgr.SetFileName(m_form.GetFileName(), false);
                captureMgr.SetRealtime(false);
            }
            else if (m_form.GetRecordState())
            {
                captureMgr.SetFileName(m_form.GetFileName(), true);
            }

            // Set Module
            pp.EnableFace();
            PXCMFaceModule faceModule = pp.QueryFace();

            if (faceModule == null)
            {
                Debug.Assert(faceModule != null);
                return;
            }

            PXCMFaceConfiguration moduleConfiguration = faceModule.CreateActiveConfiguration();

            if (moduleConfiguration == null)
            {
                Debug.Assert(moduleConfiguration != null);
                return;
            }

            var checkedProfile = m_form.GetCheckedProfile();
            var mode           = m_form.FaceModesMap.First(x => x.Value == checkedProfile).Key;

            moduleConfiguration.SetTrackingMode(mode);

            moduleConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;

            moduleConfiguration.detection.maxTrackedFaces = m_form.NumDetection;
            moduleConfiguration.landmarks.maxTrackedFaces = m_form.NumLandmarks;
            moduleConfiguration.pose.maxTrackedFaces      = m_form.NumPose;

            PXCMFaceConfiguration.ExpressionsConfiguration econfiguration = moduleConfiguration.QueryExpressions();
            if (econfiguration == null)
            {
                throw new Exception("ExpressionsConfiguration null");
            }
            econfiguration.properties.maxTrackedFaces = m_form.NumExpressions;

            econfiguration.EnableAllExpressions();
            moduleConfiguration.detection.isEnabled = m_form.IsDetectionEnabled();
            moduleConfiguration.landmarks.isEnabled = m_form.IsLandmarksEnabled();
            moduleConfiguration.pose.isEnabled      = m_form.IsPoseEnabled();
            if (m_form.IsExpressionsEnabled())
            {
                econfiguration.Enable();
            }

            PXCMFaceConfiguration.PulseConfiguration pulseConfiguration = moduleConfiguration.QueryPulse();
            if (pulseConfiguration == null)
            {
                throw new Exception("pulseConfiguration null");
            }

            pulseConfiguration.properties.maxTrackedFaces = m_form.NumPulse;
            if (m_form.IsPulseEnabled())
            {
                pulseConfiguration.Enable();
            }

            qrecognition = moduleConfiguration.QueryRecognition();
            if (qrecognition == null)
            {
                throw new Exception("PXCMFaceConfiguration.RecognitionConfiguration null");
            }
            if (m_form.IsRecognitionChecked())
            {
                qrecognition.Enable();


                #region Load the face recognition database
                if (File.Exists(DatabasePath))
                {
                    m_form.UpdateStatus("正在讀取資料庫", MainForm.Label.StatusLabel);
                    List <RecognitionFaceData> faceData = null;
                    FaceDatabaseFile.Load(DatabasePath, ref faceData, ref NameMapping);
                    FaceData = faceData.ToArray();
                    qrecognition.SetDatabase(FaceData);
                }
                #endregion
            }



            moduleConfiguration.EnableAllAlerts();
            moduleConfiguration.SubscribeAlert(FaceAlertHandler);

            pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();

            m_form.UpdateStatus("Init Started", MainForm.Label.StatusLabel);

            if (applyChangesStatus < pxcmStatus.PXCM_STATUS_NO_ERROR || pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                m_form.UpdateStatus("Init Failed", MainForm.Label.StatusLabel);
            }
            else
            {
                using (PXCMFaceData moduleOutput = faceModule.CreateOutput())
                {
                    Debug.Assert(moduleOutput != null);
                    PXCMCapture.Device.StreamProfileSet profiles;
                    PXCMCapture.Device device = captureMgr.QueryDevice();

                    if (device == null)
                    {
                        throw new Exception("device null");
                    }

                    device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);
                    CheckForDepthStream(profiles, faceModule);

                    m_form.UpdateStatus("Streaming", MainForm.Label.StatusLabel);
                    m_timer = new FPSTimer(m_form);

                    #region loop
                    while (!m_form.Stopped)
                    {
                        if (pp.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            break;
                        }
                        var isConnected = pp.IsConnected();
                        DisplayDeviceConnection(isConnected);
                        if (isConnected)
                        {
                            var sample = pp.QueryFaceSample();
                            if (sample == null)
                            {
                                pp.ReleaseFrame();
                                continue;
                            }
                            switch (mode)
                            {
                            case PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR:
                                if (sample.ir != null)
                                {
                                    DisplayPicture(sample.ir);
                                }
                                break;

                            default:
                                DisplayPicture(sample.color);
                                break;
                            }

                            moduleOutput.Update();
                            PXCMFaceConfiguration.RecognitionConfiguration recognition = moduleConfiguration.QueryRecognition();
                            if (recognition == null)
                            {
                                pp.ReleaseFrame();
                                continue;
                            }

                            if (recognition.properties.isEnabled)
                            {
                                UpdateRecognition(moduleOutput);
                            }

                            m_form.DrawGraphics(moduleOutput);
                            m_form.UpdatePanel();
                        }
                        pp.ReleaseFrame();
                    }
                    #endregion
                }

                //             moduleConfiguration.UnsubscribeAlert(FaceAlertHandler);
                //             moduleConfiguration.ApplyChanges();
                m_form.UpdateStatus("Stopped", MainForm.Label.StatusLabel);
            }

            #region Save the face recognition database file
            if (DatabaseChanged)
            {
                FaceDatabaseFile.Save(DatabasePath, FaceData.ToList(), NameMapping);
            }
            #endregion

            var dbm = new FaceDatabaseManager(pp);


            moduleConfiguration.Dispose();
            pp.Close();
            pp.Dispose();
        }
Example No. 25
    void InitializeRealSense()
    {
        pxcmStatus status;

        // Initialize a PXCMSenseManager instance
        senseManager = PXCMSenseManager.CreateInstance();

        if (senseManager == null)
        {
            Debug.LogError("PXCSenseManager.CreateInstance() failed");

            DisableRealSense();

            return;
        }

        status = senseManager.EnableFace();
        if (status != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.Log("PXCSenseManager.EnableFace() failed [" + status + "]");

            DisableRealSense();

            return;
        }

        // Configure face module
        PXCMFaceConfiguration faceCfg;

        // Configure face module for detection (bounding rect and depth)
        faceCfg = senseManager.QueryFace().CreateActiveConfiguration();

        faceCfg.detection.isEnabled       = true;
        faceCfg.landmarks.isEnabled       = false;
        faceCfg.pose.isEnabled            = false;
        faceCfg.detection.maxTrackedFaces = 1;
        faceCfg.detection.smoothingLevel  = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_HIGH;

        status = faceCfg.ApplyChanges();
        faceCfg.Dispose();

        if (status != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("PXCMFaceConfiguration failed [" + status + "]");

            DisableRealSense();

            return;
        }

        // Initialize sense manager pipeline
        status = senseManager.Init();
        if (status != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("PXCSenseManager.Init() failed [" + status + "]");

            DisableRealSense();

            return;
        }

        Debug.Log("RealSense Initialized successfully");
    }
Ejemplo n.º 26
0
    void InitializeRealSense()
    {
        pxcmStatus status;

        // Initialize a PXCMSenseManager instance
        senseManager = PXCMSenseManager.CreateInstance();

        if (senseManager == null)
        {
            Debug.LogError("PXCMSenseManager.CreateInstance() failed");

            DisableRealSense();

            return;
        }

        status = senseManager.EnableFace();
        if (status != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("PXCMSenseManager.EnableFace() failed [" + status + "]");

            DisableRealSense();

            return;
        }

        // Configure face module
        PXCMFaceConfiguration faceCfg;

        // Configure face module for detection (bounding rect and depth)
        faceCfg = senseManager.QueryFace().CreateActiveConfiguration();

        faceCfg.detection.isEnabled       = true;
        faceCfg.landmarks.isEnabled       = false;
        faceCfg.pose.isEnabled            = false;
        faceCfg.detection.maxTrackedFaces = 1;
        faceCfg.detection.smoothingLevel  = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_HIGH;

        status = faceCfg.ApplyChanges();
        faceCfg.Dispose();

        if (status != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("PXCMFaceConfiguration failed [" + status + "]");

            DisableRealSense();

            return;
        }

        // Initialize sense manager pipeline
        status = senseManager.Init();
        if (status != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("PXCMSenseManager.Init() failed [" + status + "]");

            DisableRealSense();

            return;
        }

        Debug.Log("RealSense Initialized successfully");
    }
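
Both versions of InitializeRealSense() only configure and initialize the pipeline; they do not show how face data is read afterwards. A minimal per-frame polling sketch follows, using only calls that appear elsewhere in these samples (AcquireFrame, QueryFace, CreateOutput, QueryBoundingRect); it is an illustration, not part of the original script.

    void Update()
    {
        if (senseManager == null) return;

        // Wait for the next frame (blocking, as in the other samples).
        if (senseManager.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR) return;

        PXCMFaceModule faceModule = senseManager.QueryFace();
        if (faceModule != null)
        {
            using (PXCMFaceData faceData = faceModule.CreateOutput())
            {
                faceData.Update();
                if (faceData.QueryNumberOfDetectedFaces() > 0)
                {
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
                    PXCMFaceData.DetectionData detection = face.QueryDetection();

                    PXCMRectI32 rect;
                    if (detection != null && detection.QueryBoundingRect(out rect))
                    {
                        Debug.Log("Face at " + rect.x + "," + rect.y + " size " + rect.w + "x" + rect.h);
                    }
                }
            }
        }

        senseManager.ReleaseFrame();
    }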
Ejemplo n.º 27
0
        public void SimplePipeline()
        {
            PXCMSenseManager pp = m_form.Session.CreateSenseManager();

            if (pp == null)
            {
                throw new Exception("PXCMSenseManager null");
            }

            // Set Source & Landmark Profile Index
            PXCMCapture.DeviceInfo info;
            if (m_form.GetRecordState())
            {
                pp.captureManager.SetFileName(m_form.GetFileName(), true);
                if (m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out info))
                {
                    pp.captureManager.FilterByDeviceInfo(info);
                }
            }
            else if (m_form.GetPlaybackState())
            {
                pp.captureManager.SetFileName(m_form.GetFileName(), false);
                PXCMCaptureManager cmanager = pp.QueryCaptureManager();
                if (cmanager == null)
                {
                    throw new Exception("PXCMCaptureManager null");
                }
                cmanager.SetRealtime(false);
            }
            else
            {
                if (m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out info))
                {
                    pp.captureManager.FilterByDeviceInfo(info);
                    Tuple <PXCMImage.ImageInfo, PXCMRangeF32> selectedRes = m_form.GetCheckedColorResolution();
                    var set = new PXCMCapture.Device.StreamProfileSet();
                    set.color.frameRate        = selectedRes.Item2;
                    set.color.imageInfo.format = selectedRes.Item1.format;
                    set.color.imageInfo.width  = selectedRes.Item1.width;
                    set.color.imageInfo.height = selectedRes.Item1.height;
                    pp.captureManager.FilterByStreamProfiles(set);
                }
            }

            // Set Module
            pp.EnableFace();
            PXCMFaceModule faceModule = pp.QueryFace();

            if (faceModule == null)
            {
                Debug.Assert(faceModule != null);
                return;
            }

            PXCMFaceConfiguration moduleConfiguration = faceModule.CreateActiveConfiguration();

            if (moduleConfiguration == null)
            {
                Debug.Assert(moduleConfiguration != null);
                return;
            }

            PXCMFaceConfiguration.TrackingModeType mode = m_form.GetCheckedProfile().Contains("3D")
                ? PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH
                : PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR;

            moduleConfiguration.SetTrackingMode(mode);

            moduleConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;

            moduleConfiguration.detection.maxTrackedFaces = m_form.NumDetection;
            moduleConfiguration.landmarks.maxTrackedFaces = m_form.NumLandmarks;
            moduleConfiguration.pose.maxTrackedFaces      = m_form.NumPose;
            PXCMFaceConfiguration.ExpressionsConfiguration econfiguration = moduleConfiguration.QueryExpressions();
            if (econfiguration == null)
            {
                throw new Exception("ExpressionsConfiguration null");
            }
            econfiguration.properties.maxTrackedFaces = m_form.NumExpressions;

            econfiguration.EnableAllExpressions();
            moduleConfiguration.detection.isEnabled = m_form.IsDetectionEnabled();
            moduleConfiguration.landmarks.isEnabled = m_form.IsLandmarksEnabled();
            moduleConfiguration.pose.isEnabled      = m_form.IsPoseEnabled();
            if (m_form.IsExpressionsEnabled())
            {
                econfiguration.Enable();
            }

            PXCMFaceConfiguration.RecognitionConfiguration qrecognition = moduleConfiguration.QueryRecognition();
            if (qrecognition == null)
            {
                throw new Exception("PXCMFaceConfiguration.RecognitionConfiguration null");
            }
            if (m_form.IsRecognitionChecked())
            {
                qrecognition.Enable();
            }

            moduleConfiguration.EnableAllAlerts();
            moduleConfiguration.SubscribeAlert(FaceAlertHandler);

            pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();

            m_form.UpdateStatus("Init Started", MainForm.Label.StatusLabel);

            if (applyChangesStatus < pxcmStatus.PXCM_STATUS_NO_ERROR || pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                m_form.UpdateStatus("Init Failed", MainForm.Label.StatusLabel);
            }
            else
            {
                using (PXCMFaceData moduleOutput = faceModule.CreateOutput())
                {
                    Debug.Assert(moduleOutput != null);
                    PXCMCapture.Device.StreamProfileSet profiles;

                    PXCMCaptureManager cmanager = pp.QueryCaptureManager();
                    if (cmanager == null)
                    {
                        throw new Exception("capture manager null");
                    }
                    PXCMCapture.Device device = cmanager.QueryDevice();

                    if (device == null)
                    {
                        throw new Exception("device null");
                    }

                    device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);
                    CheckForDepthStream(profiles, faceModule);

                    ushort threshold      = device.QueryDepthConfidenceThreshold();
                    int    filter_option  = device.QueryIVCAMFilterOption();
                    int    range_tradeoff = device.QueryIVCAMMotionRangeTradeOff();

                    device.SetDepthConfidenceThreshold(1);
                    device.SetIVCAMFilterOption(6);
                    device.SetIVCAMMotionRangeTradeOff(21);

                    if (m_form.IsMirrored())
                    {
                        device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
                    }
                    else
                    {
                        device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_DISABLED);
                    }

                    m_form.UpdateStatus("Streaming", MainForm.Label.StatusLabel);
                    m_timer = new FPSTimer(m_form);

                    while (!m_form.Stopped)
                    {
                        if (pp.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            break;
                        }
                        bool isConnected = pp.IsConnected();
                        DisplayDeviceConnection(isConnected);
                        if (isConnected)
                        {
                            PXCMCapture.Sample sample = pp.QueryFaceSample();
                            if (sample == null)
                            {
                                pp.ReleaseFrame();
                                continue;
                            }
                            DisplayPicture(sample.color);

                            moduleOutput.Update();
                            if (moduleConfiguration.QueryRecognition().properties.isEnabled)
                            {
                                UpdateRecognition(moduleOutput);
                            }

                            m_form.DrawGraphics(moduleOutput);
                            m_form.UpdatePanel();
                        }
                        pp.ReleaseFrame();
                    }

                    device.SetDepthConfidenceThreshold(threshold);
                    device.SetIVCAMFilterOption(filter_option);
                    device.SetIVCAMMotionRangeTradeOff(range_tradeoff);
                }

                moduleConfiguration.UnsubscribeAlert(FaceAlertHandler);
                moduleConfiguration.ApplyChanges();
                m_form.UpdateStatus("Stopped", MainForm.Label.StatusLabel);
            }
            moduleConfiguration.Dispose();
            pp.Close();
            pp.Dispose();
        }
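
SimplePipeline() subscribes FaceAlertHandler before Init(), but the handler itself is not included in the listing. A plausible minimal handler is sketched below, assuming the SDK's standard alert callback (a PXCMFaceData.AlertData argument with label, faceId and timeStamp fields); the real handler in the sample may do more.

        private void FaceAlertHandler(PXCMFaceData.AlertData alert)
        {
            // Show the fired alert (e.g. FACE_DETECTED, FACE_LOST) in the status label.
            m_form.UpdateStatus(alert.label.ToString(), MainForm.Label.StatusLabel);
        }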
Ejemplo n.º 28
0
        // Use this for initialization
        void Start()
        {
            // Creates an instance of the sense manager to be called later
            session = PXCMSenseManager.CreateInstance();


            //Output an error and stop if there is no instance of the sense manager
            if (session == null)
            {
                Debug.LogError("Failed to create the SenseManager instance!");
                return;
            }


            // Enables hand and face tracking
            sts          = session.EnableHand();
            handAnalyzer = session.QueryHand();

            sts2         = session.EnableFace();
            faceAnalyzer = session.QueryFace();


            if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Debug.LogError("PXCSenseManager.EnableHand: " + sts);
            }


            if (sts2 != pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Debug.LogError("PXCSenseManager.EnableFace: " + sts2);
            }



            // Initializes the pipeline (Init only needs to be called once)
            sts = session.Init();

            if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Debug.LogError("PXCSenseManager.Init: " + sts);
            }



            // Creates a hand config for future data
            PXCMHandConfiguration handconfig = handAnalyzer.CreateActiveConfiguration();

            PXCMFaceConfiguration faceconfig = faceAnalyzer.CreateActiveConfiguration();


            //If there is a handconfig instance
            if (handconfig != null)
            {
                handconfig.EnableAllAlerts();
                handconfig.ApplyChanges();
                handconfig.Dispose();
            }
            if (faceconfig != null)
            {
                faceconfig.EnableAllAlerts();
                faceconfig.ApplyChanges();
                faceconfig.Dispose();
            }
        }
Ejemplo n.º 29
0
        void OnEnable()
        {
            Initialized = false;

            /* Create a SenseManager instance */
            SenseManager = PXCMSenseManager.CreateInstance();

            if (SenseManager == null)
            {
                print("Unable to create the pipeline instance");
                return;
            }

            if (_speechCommandsRef.Count != 0)
            {
                SetSenseOption(SenseOption.SenseOptionID.Speech);
            }

            int numberOfEnabledModalities = 0;

            //Set mode according to RunMode - play from file / record / live stream
            if (RunMode == MCTTypes.RunModes.PlayFromFile)
            {
                //CHECK IF FILE EXISTS
                if (!System.IO.File.Exists(FilePath))
                {
                    Debug.LogWarning("No Filepath Set Or File Doesn't Exist, Run Mode Will Be Changed to Live Stream");
                    RunMode = MCTTypes.RunModes.LiveStream;
                }
                else
                {
                    PXCMCaptureManager cManager = SenseManager.QueryCaptureManager();
                    cManager.SetFileName(FilePath, false);
                    Debug.Log("SenseToolkitManager: Playing from file: " + FilePath);
                }
            }

            if (RunMode == MCTTypes.RunModes.RecordToFile)
            {
                //CHECK IF PATH
                string PathOnly = FilePath;
                while (!PathOnly[PathOnly.Length - 1].Equals('\\'))
                {
                    PathOnly = PathOnly.Remove(PathOnly.Length - 1, 1);
                }

                if (!System.IO.Directory.Exists(PathOnly))
                {
                    Debug.LogWarning("No Filepath Set Or Path Doesn't Exist, Run Mode Will Be Changed to Live Stream");
                    RunMode = MCTTypes.RunModes.LiveStream;
                }
                else
                {
                    PXCMCaptureManager cManager = SenseManager.QueryCaptureManager();
                    cManager.SetFileName(FilePath, true);
                    Debug.Log("SenseToolkitManager: Recording to file: " + FilePath);
                }
            }

            /* Enable modalities according to the set options*/
            if (IsSenseOptionSet(SenseOption.SenseOptionID.Face, true))
            {
                SenseManager.EnableFace();
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Face).Initialized = true;
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Face).Enabled = true;
                SetSenseOption(SenseOption.SenseOptionID.VideoColorStream);
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.Hand, true))
            {
                _sts = SenseManager.EnableHand();
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Hand).Initialized = true;
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Hand).Enabled = true;
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.Object, true))
            {
                _sts = SenseManager.EnableTracker();
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Object).Initialized = true;
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Object).Enabled = true;
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.Speech, true))
            {
                if (!SpeechManager.IsInitialized)
                {
                    if (SpeechManager.InitalizeSpeech())
                    {
                        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Speech).Initialized = true;
                        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Speech).Enabled = true;
                        numberOfEnabledModalities++;
                    }
                    else
                    {
                        UnsetSenseOption(SenseOption.SenseOptionID.Speech);
                    }
                }
                else
                {
                    _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Speech).Initialized = true;
                    _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Speech).Enabled = true;
                    numberOfEnabledModalities++;
                }
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoDepthStream, true) ||
                IsSenseOptionSet(SenseOption.SenseOptionID.PointCloud, true))
            {
                SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, 0, 0);
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoDepthStream).Initialized = true;
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoDepthStream).Enabled = true;
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoIRStream, true))
            {
                SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, 0, 0, 0);
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoIRStream).Initialized = true;
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoIRStream).Enabled = true;
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoColorStream, true))
            {
                if (ColorImageQuality == MCTTypes.RGBQuality.FullHD)
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080, 0);
                }
                else if (ColorImageQuality == MCTTypes.RGBQuality.HD)
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720, 0);
                }
                else if (ColorImageQuality == MCTTypes.RGBQuality.HalfHD)
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 960, 540, 0);
                }
                else
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 0);
                }
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoColorStream).Initialized = true;
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoColorStream).Enabled = true;
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoSegmentation, true))
            {
                if (ColorImageQuality == MCTTypes.RGBQuality.FullHD)
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080, 0);
                }
                else if (ColorImageQuality == MCTTypes.RGBQuality.HD)
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720, 0);
                }
                else if (ColorImageQuality == MCTTypes.RGBQuality.HalfHD)
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 960, 540, 0);
                }
                else
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 0);
                }
                SenseManager.Enable3DSeg();
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoSegmentation).Initialized = true;
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoSegmentation).Enabled = true;
                numberOfEnabledModalities++;
            }

            /* Initialize the execution */
            _sts = SenseManager.Init();
            if (_sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                if (numberOfEnabledModalities > 0)
                {
                    Debug.LogError("Unable to initialize all modalities");
                }
                return;
            }
            //Set different configurations:

            // Face
            if (IsSenseOptionSet(SenseOption.SenseOptionID.Face, true))
            {
                var faceModule = SenseManager.QueryFace();
                var faceConfiguration = faceModule.CreateActiveConfiguration();
                if (faceConfiguration == null) throw new UnityException("CreateActiveConfiguration returned null");

                faceConfiguration.Update();

                faceConfiguration.detection.isEnabled = true;
                faceConfiguration.detection.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;

                faceConfiguration.landmarks.isEnabled = true;
                faceConfiguration.landmarks.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;

                faceConfiguration.pose.isEnabled = true;
                faceConfiguration.pose.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;

                faceConfiguration.DisableAllAlerts();

                faceConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_APPEARANCE_TIME;

                if ((NumberOfDetectedFaces < 1) || (NumberOfDetectedFaces > 15))
                {
                    Debug.Log("Ilegal value for Number Of Detected Faces, value is set to 1");
                    NumberOfDetectedFaces = 1;
                }

                faceConfiguration.detection.maxTrackedFaces = NumberOfDetectedFaces;
                faceConfiguration.landmarks.maxTrackedFaces = NumberOfDetectedFaces;
                faceConfiguration.pose.maxTrackedFaces = NumberOfDetectedFaces;

                PXCMFaceConfiguration.ExpressionsConfiguration expressionConfig = faceConfiguration.QueryExpressions();
                expressionConfig.Enable();
                expressionConfig.EnableAllExpressions();

                faceConfiguration.ApplyChanges();
                faceConfiguration.Dispose();

                FaceModuleOutput = faceModule.CreateOutput();

                UnsetSenseOption(SenseOption.SenseOptionID.VideoColorStream);
            }

            // Hand
            if (IsSenseOptionSet(SenseOption.SenseOptionID.Hand, true))
            {
                PXCMHandModule handAnalysis = SenseManager.QueryHand();

                PXCMHandConfiguration handConfiguration = handAnalysis.CreateActiveConfiguration();
                if (handConfiguration == null) throw new UnityException("CreateActiveConfiguration returned null");

                handConfiguration.Update();
                handConfiguration.EnableAllGestures();
                handConfiguration.EnableStabilizer(true);
                handConfiguration.DisableAllAlerts();
                handConfiguration.EnableSegmentationImage(false);
                handConfiguration.ApplyChanges();
                handConfiguration.Dispose();

                HandDataOutput = handAnalysis.CreateOutput();
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.Object, true))
            {
                if (_senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Object).Enabled != true)
                {
                    _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Object).Enabled = true;
                    OnDisable();
                    OnEnable();
                }
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.Speech, true))
            {
                UpdateSpeechCommands();
                SpeechManager.Start();
            }

            // Create an instance for the projection & blob extractor

            if (Projection == null)
            {
                Projection = SenseManager.QueryCaptureManager().QueryDevice().CreateProjection();
            }

            if (BlobExtractor == null)
            {
                SenseManager.session.CreateImpl<PXCMBlobExtractor>(out BlobExtractor);
            }

            // Set initialization flag
            Initialized = true;
        }
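
The Object branch above calls OnDisable() followed by OnEnable() to re-initialize, but OnDisable() itself is not shown. A minimal sketch of the tear-down it would need, releasing the objects created here, is given below; the original toolkit implementation may release additional members.

        void OnDisable()
        {
            if (FaceModuleOutput != null) { FaceModuleOutput.Dispose(); FaceModuleOutput = null; }
            if (HandDataOutput != null)   { HandDataOutput.Dispose();   HandDataOutput   = null; }
            if (Projection != null)       { Projection.Dispose();       Projection       = null; }

            if (SenseManager != null)
            {
                SenseManager.Dispose();
                SenseManager = null;
            }

            Initialized = false;
        }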
Ejemplo n.º 30
0
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            this.DataContext = this;

            SenseManager = PXCMSenseManager.CreateInstance();

            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720);
            SenseManager.EnableFace();

            InitializeCamera();
            ConfigurePollingTask();
        }
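
Window_Loaded() defers the actual work to InitializeCamera() and ConfigurePollingTask(), which are not part of this snippet. A minimal sketch of the polling task is shown below, assuming the face data is read on a background worker; the method name matches the call above, everything else is illustrative.

        private void ConfigurePollingTask()
        {
            System.Threading.Tasks.Task.Factory.StartNew(() =>
            {
                SenseManager.Init();
                PXCMFaceModule faceModule = SenseManager.QueryFace();

                using (PXCMFaceData faceData = faceModule.CreateOutput())
                {
                    while (SenseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        faceData.Update();
                        int faces = faceData.QueryNumberOfDetectedFaces();
                        System.Diagnostics.Debug.WriteLine("Detected faces: " + faces);

                        // A real implementation would marshal the color sample and face data
                        // back to the UI thread here (e.g. via Dispatcher.Invoke).
                        SenseManager.ReleaseFrame();
                    }
                }
            }, System.Threading.Tasks.TaskCreationOptions.LongRunning);
        }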
Ejemplo n.º 31
0
        public void StreamColorDepth() /* Stream Color and Depth Synchronously or Asynchronously */
        {
            try
            {
                bool sts = true;

                /* Create an instance of the PXCMSenseManager interface */
                PXCMSenseManager sm = PXCMSenseManager.CreateInstance();

                if (sm == null)
                {
                    MessageBox.Show("当前对象为空!");
                    return;
                }

                /* Optional: if playback or recording */
                if (File != null)
                {
                    sm.captureManager.SetFileName(File, this.isRecord);
                }

                sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080);

                sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);
                sm.EnableHand();
                sm.EnableFace();


                /* Initialization */

                pxcmStatus status = sm.Init();

                if (status >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    /* Reset all properties */
                    sm.captureManager.device.ResetProperties(PXCMCapture.StreamType.STREAM_TYPE_ANY);

                    while (!Stop)
                    {
                        /* Wait until a frame is ready: Synchronized or Asynchronous */
                        if (sm.AcquireFrame(true).IsError())
                        {
                            break;
                        }

                        /* Display images */
                        //PXCMCapture.Sample sample = sm.QuerySample();

                        /* Render streams */
                        //EventHandler<RenderFrameEventArgs> render = RenderFrame;
                        //PXCMImage image = null;

                        //image = sample[PXCMCapture.StreamType.STREAM_TYPE_DEPTH];
                        //render(this, new RenderFrameEventArgs(0, image));


                        sm.ReleaseFrame();
                    }
                }
                else
                {
                    sts = false;
                }

                sm.Dispose();
                //if (sts) MessageBox.Show("Stopped");
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                //MessageBox.Show(e.GetType().ToString());
            }
        }
Ejemplo n.º 32
0
        private void ProcessingThread()
        {
            string videoName, nameColor, nameDepth, nameIr;
            int    width  = 640;
            int    height = 480;

            int       frameIndex      = 0;
            string    formatImageFile = ".png";
            int       nframes         = 0;
            int       lostFrames      = 0;
            string    landmarks       = null;
            long      frameTimeStamp  = 0;
            PXCMImage color;
            PXCMImage depth;
            PXCMImage ir;

            PXCMCapture.Sample    sample;
            PXCMImage.ImageData   imageColor;
            PXCMImage.ImageData   imageDepth;
            PXCMImage.ImageData   imageIr;
            WriteableBitmap       wbm1, wbm2, wbm3;
            Int32Rect             rect2crop;
            PXCMFaceModule        faceModule;
            PXCMFaceConfiguration faceConfig;
            PXCMFaceData          faceData = null;
            //Offset Cropped rectangle
            Offset offset = new Offset(0, 0, 0, 0);

            //For each directory, extract all landmarks or images streams from all videos
            foreach (var dir in dirsSource)
            {
                //If the folder is not empty
                if (Directory.EnumerateFileSystemEntries(dir).Any())
                {
                    dictPaths.TryGetValue(dir, out paths); //This dict contains all source and output dirs
                    List <string> fileList = new List <string>(Directory.GetFiles(dir, "*.rssdk"));
                    //For each video
                    foreach (var inputFile in fileList)
                    {
                        lostFrames = 0;
                        videoName  = inputFile.Split('\\').Last().Split('.')[0];
                        // Create a SenseManager instance
                        sm = PXCMSenseManager.CreateInstance();
                        // Recording mode: true
                        // Playback mode: false
                        // Settings for playback mode (read rssdk files and extract frames)
                        sm.captureManager.SetFileName(inputFile, false);
                        sm.captureManager.SetRealtime(false);
                        nframes = sm.captureManager.QueryNumberOfFrames();

                        //Update in realtime the current extraction
                        Application.Current.Dispatcher.BeginInvoke(new Action(() =>
                        {
                            textBox2.Text = nframes.ToString();
                            textBox3.Text = String.Format("Record: {0}\nVideo: {1}", paths.root, videoName);
                        }));

                        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, 0);
                        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, width, height);
                        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, width, height);

                        //Extract Landmarks
                        sm.EnableFace();
                        faceModule = sm.QueryFace();
                        faceConfig = faceModule.CreateActiveConfiguration();
                        faceConfig.landmarks.maxTrackedFaces = 1;
                        faceConfig.landmarks.isEnabled       = true;
                        faceConfig.detection.maxTrackedFaces = 1;
                        faceConfig.detection.isEnabled       = true;
                        faceConfig.EnableAllAlerts();
                        faceConfig.ApplyChanges();

                        sm.Init();

                        // This string stores all data before saving to csv file
                        landmarks = null;
                        // Start AcquireFrame/ReleaseFrame loop
                        var stopwatch = new Stopwatch();
                        stopwatch.Start();

                        while (sm.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            // Retrieve face data
                            faceModule = sm.QueryFace();
                            frameIndex = sm.captureManager.QueryFrameIndex();
                            if (faceModule != null)
                            {
                                // Retrieve the most recent processed data
                                faceData = faceModule.CreateOutput();
                                faceData.Update();
                            }
                            if (faceData != null)
                            {
                                Int32 nfaces = faceData.QueryNumberOfDetectedFaces();

                                frameTimeStamp = sm.captureManager.QueryFrameTimeStamp();
                                //PXCMCapture.Sample sample = senseManager.QuerySample();
                                if (nfaces == 0) //If none face was detected, we will consider as a "lost frame"
                                {
                                    lostFrames += 1;
                                }
                                for (Int32 i = 0; i < nfaces; i++)
                                {
                                    //Retrieve the image
                                    sample = sm.QuerySample();
                                    // Work on the images
                                    color = sample.color;
                                    depth = sample.depth;
                                    ir    = sample.ir;

                                    PXCMFaceData.Face          face         = faceData.QueryFaceByIndex(i);
                                    PXCMFaceData.LandmarksData landmarkData = face.QueryLandmarks();
                                    PXCMFaceData.DetectionData ddata        = face.QueryDetection();
                                    PXCMFaceData.PoseData      poseData     = face.QueryPose();
                                    poseData.QueryHeadPosition(out PXCMFaceData.HeadPosition headPosition);
                                    poseData.QueryPoseAngles(out PXCMFaceData.PoseEulerAngles poseEulerAngles);
                                    Debug.WriteLine(headPosition.headCenter.x + " " + headPosition.headCenter.y + " " + headPosition.headCenter.z + " " + poseEulerAngles.pitch + " " + poseEulerAngles.roll + " " + poseEulerAngles.yaw);

                                    //Rectangle coordenates from detected face
                                    ddata.QueryBoundingRect(out PXCMRectI32 rect);

                                    //See the offset struct to define the values
                                    rect2crop = new Int32Rect(rect.x + offset.x, rect.y + offset.y, rect.w + offset.w, rect.h + offset.h);
                                    ddata.QueryFaceAverageDepth(out Single depthDistance);

                                    color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out imageColor);
                                    depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_RAW, out imageDepth);
                                    ir.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out imageIr);

                                    //Convert it to Bitmap
                                    wbm1 = imageColor.ToWritableBitmap(0, color.info.width, color.info.height, 100.0, 100.0);
                                    wbm2 = imageDepth.ToWritableBitmap(0, depth.info.width, depth.info.height, 100.0, 100.0);
                                    wbm3 = imageIr.ToWritableBitmap(0, ir.info.width, ir.info.height, 100.0, 100.0);

                                    color.ReleaseAccess(imageColor);
                                    depth.ReleaseAccess(imageDepth);
                                    ir.ReleaseAccess(imageIr);

                                    nameColor = paths.rgbFolder + "\\" + videoName + "\\" + videoName + "_color_" + frameIndex + formatImageFile;
                                    nameDepth = paths.depthFolder + "\\" + videoName + "\\" + videoName + "_depth_" + frameIndex + formatImageFile;
                                    nameIr    = paths.irFolder + "\\" + videoName + "\\" + videoName + "_ir_" + frameIndex + formatImageFile;

                                    //Crops the face images!
                                    CreateThumbnail(nameColor, new CroppedBitmap(wbm1, rect2crop));
                                    CreateThumbnail(nameDepth, new CroppedBitmap(wbm2, rect2crop));
                                    CreateThumbnail(nameIr, new CroppedBitmap(wbm3, rect2crop));

                                    //Debug.WriteLine((depthDistance /1000 ) + " m" + " " + rect.x + " " + rect.y + " " + rect.w + " " + rect.h);

                                    /*
                                     * x - The horizontal coordinate of the top left pixel of the rectangle.
                                     * y - The vertical coordinate of the top left pixel of the rectangle.
                                     * w - The rectangle width in pixels.
                                     * h - The rectangle height in pixels. */

                                    if (landmarkData != null)
                                    {
                                        PXCMFaceData.LandmarkPoint[] landmarkPoints;
                                        landmarkData.QueryPoints(out landmarkPoints);

                                        Application.Current.Dispatcher.BeginInvoke(new Action(() => textBox1.Text = frameIndex.ToString()));

                                        landmarks += inputFile.Split('\\').Last() + ";" + frameIndex + ";" + nameColor + ";" + nameDepth + ";" + nameIr + ";" + frameTimeStamp + ";" + depthDistance.ToString("F") + ";" + poseEulerAngles.yaw.ToString("F") + ";" + poseEulerAngles.pitch.ToString("F") + ";" + poseEulerAngles.roll.ToString("F") + ";"; // Begin line with frame info

                                        for (int j = 0; j < landmarkPoints.Length; j++)                                                                                                                                                                                                                                                                    // Writes landmarks coordinates along the line
                                        {
                                            //get world coordinates
                                            landmarks += /*landmarkPoints[j].source.index + ";" +*/ (landmarkPoints[j].world.x * 1000).ToString("F") + ";" + (landmarkPoints[j].world.y * 1000).ToString("F") + ";" + (landmarkPoints[j].world.z * 1000).ToString("F") + ";";
                                        }
                                        for (int j = 0; j < landmarkPoints.Length; j++)
                                        {
                                            //get coordinate of the image pixel
                                            landmarks += /*landmarkPoints[j].confidenceImage + ";" + */ landmarkPoints[j].image.x.ToString("F") + ";" + landmarkPoints[j].image.y.ToString("F") + ";";
                                        }
                                        landmarks += '\n'; // Breaks line after the end of the frame coordinates
                                    }
                                }
                            }
                            // Release the frame
                            if (faceData != null)
                            {
                                faceData.Dispose();
                            }
                            sm.ReleaseFrame();

                            WriteToFile(paths.csvFile, landmarks);
                            landmarks = null;
                        }
                        sm.Dispose();
                        stopwatch.Stop();
                        //Update in realtime the current extraction
                        Application.Current.Dispatcher.BeginInvoke(new Action(() =>
                        {
                            elapsedLabel.Content = String.Format("Elapsed Time: {0} (s)", stopwatch.Elapsed.TotalSeconds.ToString("F"));
                        }));
                    }
                }
            }
        }
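
ProcessingThread() depends on two application helpers that are not included here: CreateThumbnail(), which writes the cropped face images to disk, and WriteToFile(), which appends the landmark rows to the CSV file. A minimal sketch of what they could look like (PNG encoding via WPF, plain text append) follows; the original implementations may differ.

        private static void CreateThumbnail(string fileName, System.Windows.Media.Imaging.BitmapSource image)
        {
            // Make sure the per-video output folder exists before writing the frame.
            System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(fileName));

            var encoder = new System.Windows.Media.Imaging.PngBitmapEncoder();
            encoder.Frames.Add(System.Windows.Media.Imaging.BitmapFrame.Create(image));

            using (var stream = new System.IO.FileStream(fileName, System.IO.FileMode.Create))
            {
                encoder.Save(stream);
            }
        }

        private static void WriteToFile(string fileName, string text)
        {
            if (string.IsNullOrEmpty(text)) return;
            System.IO.File.AppendAllText(fileName, text);   // landmark rows already end with '\n'
        }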
Ejemplo n.º 33
0
        public void FaceAndHandPipeLine()
        {
            PXCMSenseManager pp = m_form.Session.CreateSenseManager();

            if (pp == null)
            {
                throw new Exception("PXCMSenseManager null");
            }

            pp.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 360);

            // Face module initialization
            pp.EnableFace();
            PXCMFaceModule faceModule = pp.QueryFace();

            if (faceModule == null)
            {
                Debug.Assert(faceModule != null);
                return;
            }
            PXCMFaceConfiguration faceCfg = faceModule.CreateActiveConfiguration();

            if (faceCfg == null)
            {
                Debug.Assert(faceCfg != null);
                return;
            }
            faceCfg.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
            faceCfg.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_CLOSEST_TO_FARTHEST;
            // Track a single person
            faceCfg.detection.maxTrackedFaces = NUM_PERSONS;
            faceCfg.landmarks.maxTrackedFaces = NUM_PERSONS;
            faceCfg.pose.maxTrackedFaces      = NUM_PERSONS;

            // Expressions configuration initialization
            PXCMFaceConfiguration.ExpressionsConfiguration expressionCfg = faceCfg.QueryExpressions();
            if (expressionCfg == null)
            {
                throw new Exception("ExpressionsConfiguration null");
            }
            expressionCfg.properties.maxTrackedFaces = NUM_PERSONS;

            expressionCfg.EnableAllExpressions();
            faceCfg.detection.isEnabled = true;
            faceCfg.landmarks.isEnabled = true;
            faceCfg.pose.isEnabled      = true;
            if (expressionCfg != null)
            {
                expressionCfg.Enable();
            }

            // Pulse detection initialization
            PXCMFaceConfiguration.PulseConfiguration pulseConfiguration = faceCfg.QueryPulse();
            if (pulseConfiguration == null)
            {
                throw new Exception("pulseConfiguration null");
            }

            pulseConfiguration.properties.maxTrackedFaces = NUM_PERSONS;
            if (pulseConfiguration != null)
            {
                pulseConfiguration.Enable();
            }

            faceCfg.ApplyChanges();

            if (pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("Init failed");
            }
            else
            {
                using (PXCMFaceData faceData = faceModule.CreateOutput())
                {
                    if (faceData == null)
                    {
                        throw new Exception("face data failure");
                    }
                    while (!m_form.Stopped)
                    {
                        if (pp.AcquireFrame(true).IsError())
                        {
                            break;
                        }

                        var isConnected = pp.IsConnected();
                        if (isConnected)
                        {
                            var sample = pp.QueryFaceSample();
                            if (sample == null)
                            {
                                pp.ReleaseFrame();
                                continue;
                            }

                            // default is COLOR
                            DisplayPicture(sample.color);

                            // If no faces are detected, continue
                            faceData.Update();
                            int nFace = faceData.QueryNumberOfDetectedFaces();
                            if (nFace == 0)
                            {
                                continue;
                            }

                            // Save the data
                            PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
                            //SaveFaceLandmarkData(face);
                            SaveFacialExpressionData(face);

                            m_form.UpdatePic();
                        }
                        pp.ReleaseFrame();
                    }
                }
            }



            pp.Close();
            pp.Dispose();
        }
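
FaceAndHandPipeLine() stores per-face data through SaveFacialExpressionData(face), which is not shown. A minimal sketch of how expression intensities can be read from a tracked face is given below; the expressions queried (smile, mouth open) are only examples and the actual persistence step is left as a comment.

        private void SaveFacialExpressionData(PXCMFaceData.Face face)
        {
            PXCMFaceData.ExpressionsData expressions = face.QueryExpressions();
            if (expressions == null) return;

            PXCMFaceData.ExpressionsData.FaceExpressionResult result;
            if (expressions.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out result))
            {
                int smileIntensity = result.intensity;   // 0..100
                // ... append smileIntensity (and any other expressions) to a CSV or buffer here ...
            }
            if (expressions.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out result))
            {
                int mouthOpenIntensity = result.intensity;
                // ... same for the mouth-open intensity ...
            }
        }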
Ejemplo n.º 34
0
        //********************************* Initialization function (sets up the various RealSense parameters) *******************************************************************

        // Init all crappy parameters RealSense needs
        private void InitStreamState()
        {
            manager = session.CreateSenseManager();

            // Algorithm modules
            switch (m_algoOption)
            {
            case AlgoOption.Face:
                manager.EnableFace();
                break;

            case AlgoOption.Hand:
                manager.EnableHand();
                break;

            case AlgoOption.FaceAndHand:
                manager.EnableFace();
                manager.EnableHand();
                break;

            default:
                break;
            }

            // Video stream modules
            switch (m_streamOption)
            {
            case StreamOption.Color:
                //manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 360);
                manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080);
                break;

            case StreamOption.Depth:
                manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);
                break;

            case StreamOption.IR:
                manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, 640, 480);
                break;

            case StreamOption.ColorAndDepth:
                manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);
                manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080);
                break;

            default:
                break;
            }

            // Recording control module
            var recordPath = System.Configuration.ConfigurationManager.AppSettings["RecordPath"];

            switch (m_recordOption)
            {
            case RecordOption.Live:
                break;

            case RecordOption.Record:
                if (recordPath == null)
                {
#if DEBUG
                    System.Windows.Forms.MessageBox.Show("RecordPath Error");
#endif
                }
                manager.captureManager.SetFileName(recordPath, true);
                break;

            case RecordOption.Playback:
                if (this.PlaybackFile != null)
                {
                    manager.captureManager.SetFileName(this.PlaybackFile, false);
                }
                else
                {
                    manager.captureManager.SetFileName(recordPath, false);
                }
                manager.captureManager.SetRealtime(false);

                if (m_playback_byframe)
                {
                    manager.captureManager.SetRealtime(false);
                    manager.captureManager.SetPause(true);
                }
                //manager.captureManager.SetRealtime(true);
                break;
            }
        }
Ejemplo n.º 35
0
    private bool InitializeRealSense()
    {
        pxcmStatus status;
        senseManager = PXCMSenseManager.CreateInstance();

        if (senseManager == null)
        {
            Debug.LogError("Unable to create SenseManager.");
            return false;
        }

        status = senseManager.EnableFace();

        if (status != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("Couldn't enable the Face Module.");
            return false;
        }

        faceModule = senseManager.QueryFace();

        if (faceModule == null)
        {
            Debug.LogError("Couldn't query the Face Module.");
            return false;
        }

        PXCMFaceConfiguration faceConfiguration = faceModule.CreateActiveConfiguration();

        if (faceConfiguration == null)
        {
            Debug.LogError("Couldn't create an active configuration.");
            return false;
        }

        faceConfiguration.pose.isEnabled = true;
        faceConfiguration.pose.maxTrackedFaces = 1;

        PXCMFaceConfiguration.ExpressionsConfiguration expressionsConfiguration = faceConfiguration.QueryExpressions();
        expressionsConfiguration.Enable();

        status = expressionsConfiguration.EnableExpression(JumpExpression);
        if (status != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("Unable to enable the expression " + JumpExpression + ".");
            return false;
        }

        status = expressionsConfiguration.EnableExpression(FireExpression);
        if (status != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("Unable to enable the expression " + FireExpression + ".");
            return false;
        }

        status = faceConfiguration.ApplyChanges();

        if (status != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("Unable to apply configuration settings.");
            return false;
        }

        faceData = faceModule.CreateOutput();
        if (faceData == null)
        {
            Debug.LogError("Couldn't create the data output object.");
            return false;
        }

        status = senseManager.Init();

        if (status != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("Unable to initialize SenseManager.");
            return false;
        }

        PXCMSession session = senseManager.QuerySession();

        PXCMSmoother smoother;
        status = session.CreateImpl<PXCMSmoother>(out smoother);

        if (status != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("Failed to create the smoother.");
            return false;
        }

        smoother2D = smoother.Create2DWeighted(10);

        return true;
    }
Ejemplo n.º 36
0
        void OnEnable()
        {
            Initialized = false;

            /* Create a SenseManager instance */
            SenseManager = PXCMSenseManager.CreateInstance();

            if (SenseManager == null)
            {
                print("Unable to create the pipeline instance");
                return;
            }

            if (_speechCommandsRef.Count != 0)
            {
                SetSenseOption(SenseOption.SenseOptionID.Speech);
            }

            int numberOfEnabledModalities = 0;

            //Set mode according to RunMode - play from file / record / live stream
            if (RunMode == MCTTypes.RunModes.PlayFromFile)
            {
                //CHECK IF FILE EXISTS
                if (!System.IO.File.Exists(FilePath))
                {
                    Debug.LogWarning("No Filepath Set Or File Doesn't Exist, Run Mode Will Be Changed to Live Stream");
                    RunMode = MCTTypes.RunModes.LiveStream;
                }
                else
                {
                    PXCMCaptureManager cManager = SenseManager.QueryCaptureManager();
                    cManager.SetFileName(FilePath, false);
                    Debug.Log("SenseToolkitManager: Playing from file: " + FilePath);
                }
            }

            if (RunMode == MCTTypes.RunModes.RecordToFile)
            {
                //CHECK IF PATH
                string PathOnly = FilePath;
                while (!PathOnly[PathOnly.Length - 1].Equals('\\'))
                {
                    PathOnly = PathOnly.Remove(PathOnly.Length - 1, 1);
                }

                if (!System.IO.Directory.Exists(PathOnly))
                {
                    Debug.LogWarning("No Filepath Set Or Path Doesn't Exist, Run Mode Will Be Changed to Live Stream");
                    RunMode = MCTTypes.RunModes.LiveStream;
                }
                else
                {
                    PXCMCaptureManager cManager = SenseManager.QueryCaptureManager();
                    cManager.SetFileName(FilePath, true);
                    Debug.Log("SenseToolkitManager: Recording to file: " + FilePath);
                }
            }

            /* Enable modalities according to the set options*/
            if (IsSenseOptionSet(SenseOption.SenseOptionID.Face, true))
            {
                SenseManager.EnableFace();
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Face).Initialized = true;
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Face).Enabled     = true;
                SetSenseOption(SenseOption.SenseOptionID.VideoColorStream);
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.Hand, true))
            {
                _sts = SenseManager.EnableHand();
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Hand).Initialized = true;
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Hand).Enabled     = true;
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.Blob, true))
            {
                _sts = SenseManager.EnableBlob();
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Blob).Initialized = true;
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Blob).Enabled     = true;
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.Object, true))
            {
                _sts = SenseManager.EnableTracker();
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Object).Initialized = true;
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Object).Enabled     = true;
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.Speech, true))
            {
                if (!SpeechManager.IsInitialized)
                {
                    if (SpeechManager.InitalizeSpeech())
                    {
                        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Speech).Initialized = true;
                        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Speech).Enabled     = true;
                        numberOfEnabledModalities++;
                    }
                    else
                    {
                        UnsetSenseOption(SenseOption.SenseOptionID.Speech);
                    }
                }
                else
                {
                    _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Speech).Initialized = true;
                    _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Speech).Enabled     = true;
                    numberOfEnabledModalities++;
                }
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoDepthStream, true) ||
                IsSenseOptionSet(SenseOption.SenseOptionID.PointCloud, true))
            {
                SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, 0, 0);
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoDepthStream).Initialized = true;
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoDepthStream).Enabled     = true;
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoIRStream, true))
            {
                SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, 0, 0, 0);
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoIRStream).Initialized = true;
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoIRStream).Enabled     = true;
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoColorStream, true))
            {
                if (ColorImageQuality == MCTTypes.RGBQuality.FullHD)
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080, 0);
                }
                else if (ColorImageQuality == MCTTypes.RGBQuality.HD)
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720, 0);
                }
                else if (ColorImageQuality == MCTTypes.RGBQuality.HalfHD)
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 960, 540, 0);
                }
                else
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 0);
                }
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoColorStream).Initialized = true;
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoColorStream).Enabled     = true;
                numberOfEnabledModalities++;
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoSegmentation, true))
            {
                if (ColorImageQuality == MCTTypes.RGBQuality.FullHD)
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080, 0);
                }
                else if (ColorImageQuality == MCTTypes.RGBQuality.HD)
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720, 0);
                }
                else if (ColorImageQuality == MCTTypes.RGBQuality.HalfHD)
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 960, 540, 0);
                }
                else
                {
                    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 0);
                }
                SenseManager.Enable3DSeg();
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoSegmentation).Initialized = true;
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoSegmentation).Enabled     = true;
                numberOfEnabledModalities++;
            }

            /* Initialize the execution */
            _sts = SenseManager.Init();
            if (_sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                return;
            }

            //Set different configurations:

            // Face
            if (IsSenseOptionSet(SenseOption.SenseOptionID.Face, true))
            {
                var faceModule = SenseManager.QueryFace();
                if (faceModule == null)
                {
                    throw new UnityException("QueryFace returned null");
                }

                var faceConfiguration = faceModule.CreateActiveConfiguration();
                if (faceConfiguration == null)
                {
                    throw new UnityException("CreateActiveConfiguration returned null");
                }

                faceConfiguration.Update();

                faceConfiguration.detection.isEnabled      = true;
                faceConfiguration.detection.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;

                faceConfiguration.landmarks.isEnabled      = true;
                faceConfiguration.landmarks.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;

                faceConfiguration.pose.isEnabled      = true;
                faceConfiguration.pose.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;

                faceConfiguration.DisableAllAlerts();

                faceConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_APPEARANCE_TIME;

                if ((NumberOfDetectedFaces < 1) || (NumberOfDetectedFaces > 15))
                {
                    Debug.Log("Ilegal value for Number Of Detected Faces, value is set to 1");
                    NumberOfDetectedFaces = 1;
                }

                faceConfiguration.detection.maxTrackedFaces = NumberOfDetectedFaces;
                faceConfiguration.landmarks.maxTrackedFaces = NumberOfDetectedFaces;
                faceConfiguration.pose.maxTrackedFaces      = NumberOfDetectedFaces;

                PXCMFaceConfiguration.ExpressionsConfiguration expressionConfig = faceConfiguration.QueryExpressions();
                if (expressionConfig == null)
                {
                    throw new UnityException("QueryExpressions returned null");
                }

                expressionConfig.Enable();
                expressionConfig.EnableAllExpressions();


                faceConfiguration.ApplyChanges();
                faceConfiguration.Dispose();

                FaceModuleOutput = faceModule.CreateOutput();

                UnsetSenseOption(SenseOption.SenseOptionID.VideoColorStream);
            }

            // Hand
            if (IsSenseOptionSet(SenseOption.SenseOptionID.Hand, true))
            {
                PXCMHandModule handAnalysis = SenseManager.QueryHand();
                if (handAnalysis == null)
                {
                    throw new UnityException("QueryHand returned null");
                }

                PXCMHandConfiguration handConfiguration = handAnalysis.CreateActiveConfiguration();
                if (handConfiguration == null)
                {
                    throw new UnityException("CreateActiveConfiguration returned null");
                }

                handConfiguration.Update();
                handConfiguration.EnableAllGestures();
                handConfiguration.EnableStabilizer(true);
                handConfiguration.DisableAllAlerts();
                handConfiguration.EnableSegmentationImage(false);
                handConfiguration.ApplyChanges();
                handConfiguration.Dispose();

                HandDataOutput = handAnalysis.CreateOutput();
            }

            // Blob
            if (IsSenseOptionSet(SenseOption.SenseOptionID.Blob, true))
            {
                PXCMBlobModule blobAnalysis = SenseManager.QueryBlob();
                if (blobAnalysis == null)
                {
                    throw new UnityException("QueryBlob returned null");
                }

                PXCMBlobConfiguration blobConfiguration = blobAnalysis.CreateActiveConfiguration();
                if (blobConfiguration == null)
                {
                    throw new UnityException("CreateActiveConfiguration returned null");
                }

                blobConfiguration.Update();
                blobConfiguration.EnableContourExtraction(true);
                blobConfiguration.EnableSegmentationImage(true);
                blobConfiguration.EnableStabilizer(true);
                blobConfiguration.SetMaxDistance(50 * 10);
                blobConfiguration.ApplyChanges();
                blobConfiguration.Dispose();

                BlobDataOutput = blobAnalysis.CreateOutput();
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.Object, true))
            {
                if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Object).Enabled != true)
                {
                    _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Object).Enabled = true;
                    OnDisable();
                    OnEnable();
                }
            }

            if (IsSenseOptionSet(SenseOption.SenseOptionID.Speech, true))
            {
                UpdateSpeechCommands();
                SpeechManager.Start();
            }

            // Create an instance for the projection & blob extractor

            if (Projection == null)
            {
                Projection = SenseManager.QueryCaptureManager().QueryDevice().CreateProjection();
            }

            /* GZ
             *          if (BlobExtractor == null)
             *          {
             *                  SenseManager.session.CreateImpl<PXCMBlobExtractor>(out BlobExtractor);
             *          }*/

            // Set initialization flag
            Initialized = true;
        }
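The initialization above only creates the module outputs (FaceModuleOutput, HandDataOutput); it does not show how they are refreshed each frame. A minimal per-frame sketch, assuming the usual AcquireFrame/ReleaseFrame cycle of the RealSense SDK; the UpdateFrame name and the exact call site are assumptions, not part of the original component:

        private void UpdateFrame()
        {
            if (!Initialized)
            {
                return;
            }

            // Block until the next frame and all enabled module data are ready.
            if (SenseManager.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                return;
            }

            // Refresh the outputs created during initialization.
            if (FaceModuleOutput != null)
            {
                FaceModuleOutput.Update();
            }
            if (HandDataOutput != null)
            {
                HandDataOutput.Update();
            }

            // Always release the frame so the next AcquireFrame can succeed.
            SenseManager.ReleaseFrame();
        }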
Ejemplo n.º 37
0
        private void FaceTrackingPipeline()
        {
            IsDispose = false;
            OnStart?.Invoke(this, null);

            #region Manager Init
            realSenseManager = RealSenseObjects.Session.CreateSenseManager();

            if (realSenseManager == null)
            {
                MessageBox.Show(
                    "PXCMSenseManager初始化失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }

            PXCMCaptureManager captureManager = realSenseManager.captureManager;
            if (captureManager == null)
            {
                MessageBox.Show(
                    "PXCMCaptureManager初始化失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }
            #endregion

            #region Basic settings
            // Select the capture device
            captureManager.FilterByDeviceInfo(Form.SelectedDevice);

            // Select the stream profile
            captureManager.FilterByStreamProfiles(Form.SelectedDeviceStreamProfile);

            // Enable the face tracking module
            realSenseManager.EnableFace();
            PXCMFaceModule faceModule = realSenseManager.QueryFace();
            if (faceModule == null)
            {
                MessageBox.Show(
                    "取得PXCMFaceModule失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }

            // Create the face tracking configuration
            moduleConfiguration = faceModule.CreateActiveConfiguration();
            if (moduleConfiguration == null)
            {
                MessageBox.Show(
                    "建立PXCMFaceConfiguration失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }
            // Set the tracking mode
            moduleConfiguration.SetTrackingMode(Form.ModeType);

            moduleConfiguration.strategy                  = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;
            moduleConfiguration.detection.isEnabled       = true;
            moduleConfiguration.detection.maxTrackedFaces = 4; // track up to four faces
            moduleConfiguration.landmarks.isEnabled       = false;
            moduleConfiguration.pose.isEnabled            = false;

            recognitionConfig =
                moduleConfiguration.QueryRecognition();

            if (recognitionConfig == null)
            {
                MessageBox.Show(
                    "建立RecognitionConfiguration失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }

            recognitionConfig.Enable();
            #endregion

            #region Load database data
            if (Form.FaceData != null)
            {
                recognitionConfig.SetDatabase(Form.FaceData);
                moduleConfiguration.ApplyChanges();
            }
            #endregion

            #region Prepare to start
            moduleConfiguration.EnableAllAlerts();
            //moduleConfiguration.SubscribeAlert(FaceAlertHandler);

            pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();
            Form.SetStatus("Initializing RealSenseManager");
            if (applyChangesStatus.IsError() || realSenseManager.Init().IsError())
            {
                MessageBox.Show(
                    "RealSenseManager初始化失敗,請檢查設定正確。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }
            #endregion

            using (moduleOutput = faceModule.CreateOutput()) {
                PXCMCapture.Device.StreamProfileSet profiles;
                PXCMCapture.Device device = captureManager.QueryDevice();

                if (device == null)
                {
                    MessageBox.Show(
                        "取得設備失敗。",
                        "初始化失敗",
                        MessageBoxButtons.OK,
                        MessageBoxIcon.Error);
                    OnStop?.Invoke(this, null);
                    return;
                }

                device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);

                #region Loop
                while (!_Stop)
                {
                    while (_Paush)
                    {
                        Application.DoEvents();
                    }
                    if (realSenseManager.AcquireFrame(true).IsError())
                    {
                        break;
                    }
                    var isConnected = realSenseManager.IsConnected();
                    if (isConnected)
                    {
                        var sample = realSenseManager.QueryFaceSample();
                        if (sample == null)
                        {
                            realSenseManager.ReleaseFrame();
                            continue;
                        }
                        #region Grab the frame image
                        PXCMImage image = null;
                        if (Form.ModeType == PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR)
                        {
                            image = sample.ir;
                        }
                        else
                        {
                            image = sample.color;
                        }
                        #endregion

                        moduleOutput.Update(); // update recognition data
                        PXCMFaceConfiguration.RecognitionConfiguration recognition = moduleConfiguration.QueryRecognition();
                        if (recognition == null)
                        {
                            realSenseManager.ReleaseFrame();
                            continue;
                        }


                        #region Drawing and events
                        OnFrame?.Invoke(this, new FaceRecognitionEventArgs()
                        {
                            Image = ToBitmap(image)
                        });
                        FindFace(moduleOutput);
                        #endregion
                    }
                    // Release the frame
                    realSenseManager.ReleaseFrame();
                }
                #endregion

                // Update the database buffer
                //Buffer = moduleOutput.QueryRecognitionModule().GetDatabaseBuffer();
            }

            #region Release resources
            moduleConfiguration.Dispose();
            realSenseManager.Close();
            realSenseManager.Dispose();
            #endregion

            IsDispose = true;
            OnStop?.Invoke(this, null);
        }
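FindFace(moduleOutput) is called above but its body is not shown in this example. A hypothetical sketch of such a helper, assuming the standard PXCMFaceData recognition API (QueryFaceByIndex / QueryRecognition / QueryUserID); what it does with the recognized user is left out:

        private void FindFace(PXCMFaceData faceData)
        {
            int count = faceData.QueryNumberOfDetectedFaces();
            for (int i = 0; i < count; i++)
            {
                PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);
                if (face == null)
                {
                    continue;
                }

                PXCMFaceData.RecognitionData recognition = face.QueryRecognition();
                if (recognition == null)
                {
                    continue;
                }

                // A face already present in the database reports the user ID it
                // was registered under; unknown faces could be registered here.
                if (recognition.IsRegistered())
                {
                    int userId = recognition.QueryUserID();
                    // ... raise an event or update the UI with userId ...
                }
            }
        }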
Ejemplo n.º 38
0
        /// <summary>
        /// Starts this instance.
        /// </summary>
        public void Start()
        {
            if (_sm != null)
                throw new ResearchException("Camera is already started.");

            _sm =  PXCMSenseManager.CreateInstance();

            // Configure face detection.
            if (EnableFace)
            {
                _sm.EnableFace();
                var faceModule = _sm.QueryFace();
                using (PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration())
                {
                    faceConfig.EnableAllAlerts();
                    faceConfig.pose.isEnabled = true;
                    faceConfig.pose.maxTrackedFaces = 4;

                    if (EnableExpression)
                    {
                        PXCMFaceConfiguration.ExpressionsConfiguration expression = faceConfig.QueryExpressions();
                        expression.Enable();
                        expression.EnableAllExpressions();
                    }

                    // Apply the face configuration whether or not expressions were enabled.
                    faceConfig.ApplyChanges();
                }
            }

            if (EnableEmotion)
            {
                // Configure emotion detection.
                _sm.EnableEmotion();
            }

            if (EnableStreaming)
            {
                // Configure streaming.
                _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);
            //    _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);
            //    _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, 640, 480);
            }

            // Event handler for data callbacks.
            var handler = new PXCMSenseManager.Handler {
                onModuleProcessedFrame=OnModuleProcessedFrame
            };

            _sm.Init(handler);

            // GO.
            Debug.WriteLine("{0} Starting streaming.", Time());
            _sm.StreamFrames(false);

            //Debug.WriteLine("{0} End streaming.", Time());
        }
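The OnModuleProcessedFrame handler wired up above is not shown in this snippet. A hedged sketch of what it could look like, assuming the standard PXCMSenseManager.Handler callback signature (module id, module instance, capture sample); creating a PXCMFaceData per callback is kept only for brevity — a real handler would typically reuse one created after Init:

        private pxcmStatus OnModuleProcessedFrame(int mid, PXCMBase module, PXCMCapture.Sample sample)
        {
            // React only to frames processed by the face module.
            if (mid == PXCMFaceModule.CUID)
            {
                var faceModule = module.QueryInstance<PXCMFaceModule>();
                using (PXCMFaceData faceData = faceModule.CreateOutput())
                {
                    faceData.Update();
                    Debug.WriteLine("{0} {1} face(s) detected.", Time(), faceData.QueryNumberOfDetectedFaces());
                }
            }

            // Returning an error status here would stop the StreamFrames loop.
            return pxcmStatus.PXCM_STATUS_NO_ERROR;
        }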
        private void ConfigureRealSense()
        {
            PXCMFaceModule faceModule;
            PXCMFaceConfiguration faceConfig;

            // Start the SenseManager and session
            senseManager = PXCMSenseManager.CreateInstance();
            captureManager = senseManager.captureManager;

            // Enable the color stream
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60);
            //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480, 0);

            // Enable the face module
            senseManager.EnableFace();
            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            // Enable facial recognition
            recognitionConfig = faceConfig.QueryRecognition();
            recognitionConfig.Enable();

            // Enable landmark detection
            faceConfig.landmarks.isEnabled = true;

            // Create a recognition database
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            recognitionDesc.maxUsers = DatabaseUsers;
            //recognitionConfig.CreateStorage(DatabaseName, out recognitionDesc);
            //recognitionConfig.UseStorage(DatabaseName);
            LoadDatabaseFromFile();
            recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
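LoadDatabaseFromFile() is called above but not defined in this snippet. A hypothetical sketch, assuming the recognition database was previously dumped to disk (for example from RecognitionModuleData.GetDatabaseBuffer) and that DatabaseName holds the file path:

        private void LoadDatabaseFromFile()
        {
            // Nothing to load on first run; recognition starts with an empty database.
            if (!System.IO.File.Exists(DatabaseName))
            {
                return;
            }

            // Hand the serialized user data back to the recognition module.
            byte[] buffer = System.IO.File.ReadAllBytes(DatabaseName);
            recognitionConfig.SetDatabase(buffer);
        }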
Ejemplo n.º 40
0
        /// <summary>
        /// Face tracking pipeline.
        /// </summary>
        public void FacePipeLine()
        {
            PXCMSenseManager pp = m_form.Session.CreateSenseManager();

            if (pp == null)
            {
                throw new Exception("PXCMSenseManager null");
            }

            pp.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 360);

            // Initialize the face module
            pp.EnableFace();


            PXCMFaceModule faceModule = pp.QueryFace();

            if (faceModule == null)
            {
                Debug.Assert(false, "QueryFace returned null");
                return;
            }
            PXCMFaceConfiguration faceCfg = faceModule.CreateActiveConfiguration();

            if (faceCfg == null)
            {
                Debug.Assert(false, "CreateActiveConfiguration returned null");
                return;
            }
            faceCfg.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
            faceCfg.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_CLOSEST_TO_FARTHEST;
            // Track a single person
            faceCfg.detection.maxTrackedFaces = NUM_PERSONS;
            faceCfg.landmarks.maxTrackedFaces = NUM_PERSONS;
            faceCfg.pose.maxTrackedFaces      = NUM_PERSONS;

            // Initialize expressions
            PXCMFaceConfiguration.ExpressionsConfiguration expressionCfg = faceCfg.QueryExpressions();
            if (expressionCfg == null)
            {
                throw new Exception("ExpressionsConfiguration null");
            }
            expressionCfg.properties.maxTrackedFaces = NUM_PERSONS;

            expressionCfg.EnableAllExpressions();
            faceCfg.detection.isEnabled = true;
            faceCfg.landmarks.isEnabled = true;
            faceCfg.pose.isEnabled      = true;
            expressionCfg.Enable();

            // Initialize pulse estimation
            PXCMFaceConfiguration.PulseConfiguration pulseConfiguration = faceCfg.QueryPulse();
            if (pulseConfiguration == null)
            {
                throw new Exception("pulseConfiguration null");
            }

            pulseConfiguration.properties.maxTrackedFaces = NUM_PERSONS;
            pulseConfiguration.Enable();

            // Initialize face recognition (currently disabled)
            //PXCMFaceConfiguration.RecognitionConfiguration qrecognition = faceCfg.QueryRecognition();
            //if (qrecognition == null)
            //{
            //    throw new Exception("PXCMFaceConfiguration.RecognitionConfiguration null");
            //}
            //else
            //{
            //    qrecognition.Enable();
            //}

            faceCfg.ApplyChanges();

            if (pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("Init failed");
            }
            else
            {
                using (PXCMFaceData faceData = faceModule.CreateOutput())
                {
                    if (faceData == null)
                    {
                        throw new Exception("face data failure");
                    }
                    while (!m_form.Stopped)
                    {
                        if (pp.AcquireFrame(true).IsError())
                        {
                            break;
                        }

                        var isConnected = pp.IsConnected();
                        if (isConnected)
                        {
                            var sample = pp.QueryFaceSample();
                            if (sample == null)
                            {
                                pp.ReleaseFrame();
                                continue;
                            }

                            // default is COLOR
                            DisplayPicture(sample.color);

                            // If no face is detected, release the frame and skip it
                            faceData.Update();
                            int nFace = faceData.QueryNumberOfDetectedFaces();
                            if (nFace == 0)
                            {
                                pp.ReleaseFrame();
                                continue;
                            }

                            // Save face data (disabled)
                            //PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
                            //SaveFaceLandmarkData(face);
                            //SaveFacialExpressionData(face);

                            // Get face landmark data
                            PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
                            Landmarks.updateData(face);

                            // Get expression data
                            PXCMFaceData.ExpressionsData edata = face.QueryExpressions();

                            // Lock for cross-thread synchronization; this data is also accessed by VideoModule
                            lock (EmotionModel.svmFeature)
                            {
                                if (edata != null)
                                {
                                    // Extract expression data
                                    int startIdx = EmotionModel.FaceExpressionStartIdx;
                                    for (int i = 0; i < 22; i++)
                                    {
                                        PXCMFaceData.ExpressionsData.FaceExpressionResult score;
                                        edata.QueryExpression((PXCMFaceData.ExpressionsData.FaceExpression)i, out score);
                                        Expression.facialExpressionIndensity[i] = score.intensity;
                                        // Set the SVM feature
                                        EmotionModel.svmFeature[startIdx + i].Index = startIdx + i;
                                        EmotionModel.svmFeature[startIdx + i].Value = score.intensity;
                                    }
                                }

                                // Extract landmark position data
                                int startIdx2 = EmotionModel.FaceLandmarkStartIdx;
                                for (int i = 0; i < EmotionModel.FaceLandmarkCnt; i++)
                                {
                                    // Set the SVM feature
                                    EmotionModel.svmFeature[startIdx2 + i].Index = startIdx2 + i;
                                    EmotionModel.svmFeature[startIdx2 + i].Value = Landmarks.Landmarks[i];
                                }
                            }

                            m_form.UpdatePic();
                        }
                        pp.ReleaseFrame();
                    }
                }
            }

            pp.Close();
            pp.Dispose();
        }
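The pipeline above enables the pulse estimator but never reads the result back. A minimal sketch, assuming the standard PXCMFaceData.PulseData API, of how the heart-rate estimate could be read inside the same frame loop, right after the face is obtained with QueryFaceByIndex(0):

            // Inside the acquisition loop, after the face has been retrieved:
            PXCMFaceData.PulseData pulseData = face.QueryPulse();
            if (pulseData != null)
            {
                // Estimated heart rate in beats per minute; stays at 0 until
                // enough frames have been observed to compute a value.
                float heartRate = pulseData.QueryHeartRate();
            }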