public void Configure(RealSenseCamera camera)
        {
            _camera = camera;
            using (var faceModule = _camera.Manager.QueryFace()) {
                using (var moduleConfiguration = faceModule.CreateActiveConfiguration()) {
                    moduleConfiguration.detection.maxTrackedFaces = 1;
                    var expressionConfig = moduleConfiguration.QueryExpressions();
                    expressionConfig.Enable();
                    expressionConfig.EnableAllExpressions();

                    var desc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
                    desc.maxUsers     = 10;
                    desc.isPersistent = true;
                    var recognitionConfiguration = moduleConfiguration.QueryRecognition();
                    recognitionConfiguration.CreateStorage(StorageName, out desc);
                    recognitionConfiguration.UseStorage(StorageName);
                    recognitionConfiguration.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

                    if (File.Exists(StorageFileName))
                    {
                        var bytes = File.ReadAllBytes(StorageFileName);
                        recognitionConfiguration.SetDatabaseBuffer(bytes);
                    }
                    recognitionConfiguration.Enable();
                    moduleConfiguration.ApplyChanges();
                }
            }
        }
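Configure() above restores the recognition database from StorageFileName via SetDatabaseBuffer, but the listing never shows the save path. The sketch below is one possible counterpart, assuming a PXCMFaceData instance (faceData) created from the same face module and the same StorageFileName constant; the QueryRecognitionModule/QueryDatabaseSize/QueryDatabaseBuffer calls follow the SDK's face recognition sample, and SaveDatabase itself is a hypothetical helper, not part of the original source.

        // Hypothetical counterpart to the load path above: dump the current recognition
        // database to StorageFileName so a later Configure() call can restore it.
        public void SaveDatabase(PXCMFaceData faceData)
        {
            var recognitionData = faceData.QueryRecognitionModule();
            var buffer = new byte[recognitionData.QueryDatabaseSize()];
            recognitionData.QueryDatabaseBuffer(buffer);
            File.WriteAllBytes(StorageFileName, buffer);
        }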
Example #3
        public override void Start()
        {
            _cancellationToken = new CancellationTokenSource();
            _manager.EnableEmotion();
            _manager.EnableFace();

            using (var faceModule = _manager.QueryFace()) {
                using (var moduleConfiguration = faceModule.CreateActiveConfiguration()) {
                    moduleConfiguration.detection.maxTrackedFaces = 1;
                    var expressionConfig = moduleConfiguration.QueryExpressions();
                    expressionConfig.Enable();
                    expressionConfig.EnableAllExpressions();

                    var desc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
                    desc.maxUsers     = 10;
                    desc.isPersistent = true;
                    var recognitionConfiguration = moduleConfiguration.QueryRecognition();
                    recognitionConfiguration.CreateStorage(StorageName, out desc);
                    recognitionConfiguration.UseStorage(StorageName);
                    recognitionConfiguration.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

                    if (File.Exists(StorageFileName))
                    {
                        var bytes = File.ReadAllBytes(StorageFileName);
                        recognitionConfiguration.SetDatabaseBuffer(bytes);
                    }
                    recognitionConfiguration.Enable();
                    moduleConfiguration.ApplyChanges();
                }
            }

            _manager.EnableHand();
            using (var handModule = _manager.QueryHand()) {
                using (var handConfig = handModule.CreateActiveConfiguration()) {
                    //handConfig.EnableAllAlerts();
                    int numGestures = handConfig.QueryGesturesTotalNumber();
                    for (int i = 0; i < numGestures; i++)
                    {
                        string name;
                        handConfig.QueryGestureNameByIndex(i, out name);
                        Debug.WriteLine("Gestures: " + name);
                    }
                    handConfig.EnableAllGestures();
                    handConfig.SubscribeGesture(OnGesture);
                    handConfig.EnableTrackedJoints(true);
                    handConfig.ApplyChanges();
                }
            }
            Debug.WriteLine("Initializing Camera...");

            var status = _manager.Init();

            if (status != NoError)
            {
                throw new CameraException(status.ToString());
            }
            Task.Factory.StartNew(Loop,
                                  TaskCreationOptions.LongRunning,
                                  _cancellationToken.Token);
        }
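Start() subscribes OnGesture for every enabled gesture, but the handler is not part of this listing. Below is a minimal sketch of what it could look like, assuming the SDK's fired-gesture callback signature (PXCMHandData.GestureData); the body only logs the gesture name and the hand it came from.

        // Assumed shape of the gesture callback wired up by SubscribeGesture above; it is not
        // part of the original source. GestureData carries the gesture name and the hand id.
        private void OnGesture(PXCMHandData.GestureData gestureData)
        {
            Debug.WriteLine("Gesture fired: " + gestureData.name + " (hand " + gestureData.handId + ")");
        }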
Example #4
        private void ConfigureRealSense(PXCMSession session)
        {
            PXCMFaceModule        faceModule;
            PXCMFaceConfiguration faceConfig;

            // Start the SenseManager and session
            senseManager = session.CreateSenseManager();

            // Enable the color stream
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);

            // Enable the face module
            senseManager.EnableFace();
            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            // Enable landmarks detection
            faceConfig.landmarks.isEnabled = true;

            // Enable pose detection
            faceConfig.pose.isEnabled = true;

            // Enable facial recognition
            recogConfig = faceConfig.QueryRecognition();
            recogConfig.Enable();

            // Create a recognition database storage
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            recognitionDesc.maxUsers = MAX_USERS;
            recogConfig.CreateStorage(DB_NAME, out recognitionDesc);
            recogConfig.UseStorage(DB_NAME);
            isDBLoaded = LoadDB();
            recogConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            //IVCam Setting
            senseManager.QueryCaptureManager().QueryDevice().SetIVCAMFilterOption(7);
            senseManager.QueryCaptureManager().QueryDevice().SetIVCAMAccuracy(PXCMCapture.Device.IVCAMAccuracy.IVCAM_ACCURACY_COARSE);
            senseManager.QueryCaptureManager().QueryDevice().SetIVCAMLaserPower(16);
            senseManager.QueryCaptureManager().QueryDevice().SetIVCAMMotionRangeTradeOff(100);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
        private void ConfigureRealSense()
        {
            PXCMFaceModule        faceModule;
            PXCMFaceConfiguration faceConfig;

            // Start the SenseManager and session
            senseManager = PXCMSenseManager.CreateInstance();

            // Enable the color stream
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
            //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 550, 550, 30);

            // Enable the face module
            senseManager.EnableFace();
            //senseManager.EnableHand();

            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            expressionConfiguration = faceConfig.QueryExpressions();
            expressionConfiguration.Enable();
            expressionConfiguration.EnableAllExpressions();


            // Enable facial recognition
            recognitionConfig = faceConfig.QueryRecognition();
            recognitionConfig.Enable();

            // Create a recognition database
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            recognitionDesc.maxUsers = DatabaseUsers;
            recognitionConfig.CreateStorage(DatabaseName, out recognitionDesc);
            recognitionConfig.UseStorage(DatabaseName);
            LoadDatabaseFromFile();
            recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
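Several of these ConfigureRealSense variants (and the LoadDB() call in the earlier example) restore the recognition database through a LoadDatabaseFromFile() helper that is not shown. A minimal sketch, assuming the database is kept in a single file named by a DatabaseFileName constant (hypothetical here); it mirrors the File.Exists/SetDatabaseBuffer pattern used in the Configure and Start examples above.

        // Hypothetical LoadDatabaseFromFile(): restore a previously saved recognition database
        // buffer, if one exists. DatabaseFileName is an assumed constant, not from the source.
        private void LoadDatabaseFromFile()
        {
            if (!File.Exists(DatabaseFileName)) return;

            byte[] buffer = File.ReadAllBytes(DatabaseFileName);
            recognitionConfig.SetDatabaseBuffer(buffer);
        }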
        private void ConfigureRealSense()
        {
            PXCMFaceModule faceModule;
            PXCMFaceConfiguration faceConfig;

            // Start the SenseManager and session
            senseManager = PXCMSenseManager.CreateInstance();
            captureManager = senseManager.captureManager;

            // Enable the color stream
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60);
            //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480, 0);

            // Enable the face module
            senseManager.EnableFace();
            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            // Enable facial recognition
            recognitionConfig = faceConfig.QueryRecognition();
            recognitionConfig.Enable();

            // Enable landmark detection
            faceConfig.landmarks.isEnabled = true;

            // Create a recognition database
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            recognitionDesc.maxUsers = DatabaseUsers;
            //recognitionConfig.CreateStorage(DatabaseName, out recognitionDesc);
            //recognitionConfig.UseStorage(DatabaseName);
            LoadDatabaseFromFile();
            recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
        private void InitializeFace()
        {
            // Enable face detection
            var sts = senceManager.EnableFace();
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("顔検出の有効化に失敗しました");
            }

            // Get the face module
            var faceModule = senceManager.QueryFace();

            // Get the face detection configuration
            PXCMFaceConfiguration config = faceModule.CreateActiveConfiguration();
            config.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            config.ApplyChanges();

            // Added: get the face recognition configuration
            PXCMFaceConfiguration.RecognitionConfiguration rcfg = config.QueryRecognition();

            // Added: enable face recognition
            rcfg.Enable();

            // Added: prepare the recognition database
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc desc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            desc.maxUsers = 10;
            rcfg.CreateStorage("MyDB", out desc);
            rcfg.UseStorage("MyDB");

            // Added: set the registration mode for recognition
            rcfg.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Initialize the pipeline
            pxcmStatus ret = senceManager.Init();
            if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("初期化に失敗しました");
            }

            // Get device information
            PXCMCapture.Device device = senceManager.QueryCaptureManager().QueryDevice();
            if (device == null)
            {
                throw new Exception("deviceの生成に失敗しました");
            }

            // Mirror the image
            device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            PXCMCapture.DeviceInfo deviceInfo;
            device.QueryDeviceInfo(out deviceInfo);
            if (deviceInfo.model == PXCMCapture.DeviceModel.DEVICE_MODEL_IVCAM)
            {
                device.SetDepthConfidenceThreshold(1);
                device.SetIVCAMFilterOption(6);
                device.SetIVCAMMotionRangeTradeOff(21);
            }

            config.detection.isEnabled = true;
            config.detection.maxTrackedFaces = DETECTION_MAXFACES;
            config.QueryRecognition().Enable();
            config.ApplyChanges();

            faceData = faceModule.CreateOutput();
        }
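InitializeFace() ends once faceData has been created; the per-frame processing is not part of this listing. A rough sketch of that loop body follows, assuming the senceManager and faceData fields from above and the SDK's AcquireFrame/Update/ReleaseFrame flow; UpdateFrame is a hypothetical method name, and the recognized-user lookup uses the QueryRecognition/QueryUserID calls the SDK exposes on per-face data.

        // Hypothetical per-frame update for the configuration above: acquire a frame, refresh
        // faceData, log the recognition id of each detected face, then release the frame.
        private void UpdateFrame()
        {
            if (senceManager.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                return;
            }

            faceData.Update();
            for (int i = 0; i < faceData.QueryNumberOfDetectedFaces(); i++)
            {
                PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);
                PXCMFaceData.RecognitionData recognition = face.QueryRecognition();
                if (recognition != null)
                {
                    // Unregistered faces typically report a negative user id.
                    Debug.WriteLine("Face " + i + ": user id " + recognition.QueryUserID());
                }
            }

            senceManager.ReleaseFrame();
        }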
Example #9
        private void ConfigureRealSense()
        {
            PXCMFaceModule faceModule;



            // Start the SenseManager and session
            senseManager = PXCMSenseManager.CreateInstance();
            senseManager.captureManager.SetFileName("recorded_video.wm", true);
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 0);

            senseManager.Init();

            senseManager.captureManager.SetRealtime(false);
            senseManager.captureManager.SetPause(true);

            // Enable the color stream
            //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60);
            // 60 or 0 for fps?

            // Enable the face module
            senseManager.EnableFace();
            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();
            faceConfig.detection.isEnabled = true;
            expressionConfig = faceConfig.QueryExpressions();
            expressionConfig.Enable();
            expressionConfig.EnableAllExpressions();
            faceConfig.EnableAllAlerts();



            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            // Enable facial recognition
            recognitionConfig = faceConfig.QueryRecognition();
            recognitionConfig.Enable();


            // Create a recognition database
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            recognitionDesc.maxUsers = DatabaseUsers;
            recognitionConfig.CreateStorage(DatabaseName, out recognitionDesc);
            recognitionConfig.UseStorage(DatabaseName);
            LoadDatabaseFromFile();
            recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();
            int numFaces = faceData.QueryNumberOfDetectedFaces();

            Console.WriteLine("number of detected faces", numFaces);

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
        //---------------------------------------------------ConfigureRealSense-----------------------------------------------------------------------------------------------------



        public static void ConfigureRealSense() //Configures the pipeline for processing
        {
            /* Start the SenseManager and session
             * Creates the PXCMSenseManager instance in senseManager to
             * interface with predefined algorithms, e.g. facial recognition
             */
            senseManager = PXCMSenseManager.CreateInstance(); //private PXCMSenseManager senseManager;

            /* Enable the color stream */
            ///////senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);

            /* Enable the face module
             * Activates face tracking in the pipeline
             */
            senseManager.EnableFace();
            PXCMFaceModule faceModule = senseManager.QueryFace(); //Returns the face module instance (FaceModule)

            /* CreateActiveConfiguration returns a new FaceConfiguration instance,
             * which the application uses to configure the face tracking module.
             * The application must release the instance after use.
             */
            PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            /* Enable facial recognition
             * Creates a configuration interface for the facial recognition algorithm;
             * the algorithm already exposes the CRUD-style options, they only need to be configured
             */
            recognitionConfig = faceConfig.QueryRecognition(); //private PXCMFaceConfiguration.RecognitionConfiguration recognitionConfig;
            recognitionConfig.Enable();                        //faceModule.CreateActiveConfiguration().QueryRecognition().Enable();

            /* Create a recognition database
             * The 'RecognitionStorageDesc' structure describes the recognition database configuration parameters.
             */
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc storageDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            storageDesc.maxUsers = DatabaseUsers;                           //Maximum number of users supported by the database
            recognitionConfig.CreateStorage(DatabaseName, out storageDesc); //Creates the database using the description configured in 'storageDesc'
            recognitionConfig.UseStorage(DatabaseName);                     //Puts the database into use

            /* Checks whether the file exists and, if it does, calls 'SetDatabaseBuffer'
             * to replace the recognition database
             */
            LoadDatabaseFromFile();

            /* REGISTRATION_MODE_CONTINUOUS registers users automatically.
             * REGISTRATION_MODE_ON_DEMAND  registers users on request
             * (a sketch of on-demand registration follows this example).
             */
            recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);


            faceConfig.ApplyChanges(); // Applies the configuration changes made to the face recognition module

            /* The 'Init' function initializes the pipeline
             * (this sets up the processing pipeline that receives and processes the hardware data)
             */
            senseManager.Init();

            /* 'CreateOutput' returns a new FaceData instance, which the application uses to store and retrieve
             *  the output data of the face tracking module.
             *  The application must release the instance after use.
             */
            faceData = faceModule.CreateOutput();

            /* 'SetMirrorMode' sets the orientation of the camera images.
             */
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
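The registration-mode comment above contrasts continuous registration with REGISTRATION_MODE_ON_DEMAND. Under on-demand registration the application registers faces itself from the per-frame output; the sketch below shows one way that could look, assuming the static faceData field created above has already been updated for the current frame. RegisterUserOnDemand is a hypothetical helper, and IsRegistered/RegisterUser are the recognition-data methods the SDK exposes for this purpose.

        // Hypothetical helper for REGISTRATION_MODE_ON_DEMAND: register the first detected face
        // if it is not already in the database. Assumes faceData.Update() has run for this frame.
        public static void RegisterUserOnDemand()
        {
            if (faceData.QueryNumberOfDetectedFaces() == 0) return;

            PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
            PXCMFaceData.RecognitionData recognition = face.QueryRecognition();
            if (recognition != null && !recognition.IsRegistered())
            {
                int userId = recognition.RegisterUser(); // returns the newly assigned user id
                Console.WriteLine("Registered new user with id " + userId);
            }
        }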