Example #1
        private void ConfigureRealSense(PXCMSession session)
        {
            PXCMFaceModule        faceModule;
            PXCMFaceConfiguration faceConfig;

            // Start the SenseManager and session
            senseManager = session.CreateSenseManager();

            // Enable the color stream
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);

            // Enable the face module
            senseManager.EnableFace();
            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            // Enable landmarks detection
            faceConfig.landmarks.isEnabled = true;

            // Enable pose detection
            faceConfig.pose.isEnabled = true;

            // Enable facial recognition
            recogConfig = faceConfig.QueryRecognition();
            recogConfig.Enable();

            // Create a recognition database storage
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            recognitionDesc.maxUsers = MAX_USERS;
            recogConfig.CreateStorage(DB_NAME, out recognitionDesc);
            recogConfig.UseStorage(DB_NAME);
            isDBLoaded = LoadDB();
            recogConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();

            // Query the device once for the mirror and IVCAM settings below
            PXCMCapture.Device device = senseManager.QueryCaptureManager().QueryDevice();

            // Mirror image
            device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            // IVCAM settings
            device.SetIVCAMFilterOption(7);
            device.SetIVCAMAccuracy(PXCMCapture.Device.IVCAMAccuracy.IVCAM_ACCURACY_COARSE);
            device.SetIVCAMLaserPower(16);
            device.SetIVCAMMotionRangeTradeOff(100);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
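
The example above calls a LoadDB() helper that is not part of this listing. Below is a minimal sketch of what such a helper might look like, assuming a DB_FILENAME path field and a database file that was previously written out from the recognition module's buffer; SetDatabaseBuffer is the SDK call, the surrounding names are illustrative.

        // Hypothetical sketch of the LoadDB() helper referenced above.
        // DB_FILENAME is an assumed field; the file is expected to hold the raw buffer
        // previously exported from the recognition module.
        private bool LoadDB()
        {
            if (!System.IO.File.Exists(DB_FILENAME))
            {
                return false;
            }

            byte[] buffer = System.IO.File.ReadAllBytes(DB_FILENAME);
            recogConfig.SetDatabaseBuffer(buffer);
            return true;
        }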
        private void ConfigureRealSense()
        {
            PXCMFaceModule        faceModule;
            PXCMFaceConfiguration faceConfig;

            // Start the SenseManager and session
            senseManager = PXCMSenseManager.CreateInstance();

            // Enable the color stream
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
            //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 550, 550, 30);

            // Enable the face module
            senseManager.EnableFace();
            //senseManager.EnableHand();

            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            expressionConfiguration = faceConfig.QueryExpressions();
            expressionConfiguration.Enable();
            expressionConfiguration.EnableAllExpressions();


            // Enable facial recognition
            recognitionConfig = faceConfig.QueryRecognition();
            recognitionConfig.Enable();

            // Create a recognition database
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            recognitionDesc.maxUsers = DatabaseUsers;
            recognitionConfig.CreateStorage(DatabaseName, out recognitionDesc);
            recognitionConfig.UseStorage(DatabaseName);
            LoadDatabaseFromFile();
            recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
        private void ConfigureRealSense()
        {
            PXCMFaceModule        faceModule;
            PXCMFaceConfiguration faceConfig;

            // Start the SenseManager and session
            senseManager = PXCMSenseManager.CreateInstance();

            // Enable the color stream
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);

            // Enable the face module
            senseManager.EnableFace();
            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for IR face tracking
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR);

            // Enable facial recognition
            recognitionConfig = faceConfig.QueryRecognition();
            recognitionConfig.Enable();


            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
        private void ConfigureRealSense()
        {
            PXCMFaceModule faceModule;
            PXCMFaceConfiguration faceConfig;

            // Start the SenseManager and session
            senseManager = PXCMSenseManager.CreateInstance();
            captureManager = senseManager.captureManager;

            // Enable the color stream
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60);
            //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480, 0);

            // Enable the face module
            senseManager.EnableFace();
            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            // Enable facial recognition
            recognitionConfig = faceConfig.QueryRecognition();
            recognitionConfig.Enable();

            // Enable landmark detection
            faceConfig.landmarks.isEnabled = true;

            // Create a recognition database
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            recognitionDesc.maxUsers = DatabaseUsers;
            //recognitionConfig.CreateStorage(DatabaseName, out recognitionDesc);
            //recognitionConfig.UseStorage(DatabaseName);
            LoadDatabaseFromFile();
            recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
Example #5
        private void FaceTrackingPipeline()
        {
            IsDispose = false;
            OnStart?.Invoke(this, null);

            #region Manager Init
            realSenseManager = RealSenseObjects.Session.CreateSenseManager();

            if (realSenseManager == null)
            {
                MessageBox.Show(
                    "PXCMSenseManager初始化失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }

            PXCMCaptureManager captureManager = realSenseManager.captureManager;
            if (captureManager == null)
            {
                MessageBox.Show(
                    "PXCMCaptureManager初始化失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }
            #endregion

            #region Basic settings
            //Set the capture device
            captureManager.FilterByDeviceInfo(Form.SelectedDevice);

            //Set the stream profile
            captureManager.FilterByStreamProfiles(Form.SelectedDeviceStreamProfile);


            //Enable the face tracking module
            realSenseManager.EnableFace();
            PXCMFaceModule faceModule = realSenseManager.QueryFace();
            if (faceModule == null)
            {
                MessageBox.Show(
                    "取得PXCMFaceModule失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }

            //Create the face tracking module configuration
            moduleConfiguration = faceModule.CreateActiveConfiguration();
            if (moduleConfiguration == null)
            {
                MessageBox.Show(
                    "建立PXCMFaceConfiguration失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }
            //Set the tracking mode
            moduleConfiguration.SetTrackingMode(Form.ModeType);

            moduleConfiguration.strategy                  = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;
            moduleConfiguration.detection.isEnabled       = true;
            moduleConfiguration.detection.maxTrackedFaces = 4;//track up to four faces
            moduleConfiguration.landmarks.isEnabled       = false;
            moduleConfiguration.pose.isEnabled            = false;

            recognitionConfig =
                moduleConfiguration.QueryRecognition();

            if (recognitionConfig == null)
            {
                MessageBox.Show(
                    "建立RecognitionConfiguration失敗。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }

            recognitionConfig.Enable();
            #endregion

            #region Load database data
            if (Form.FaceData != null)
            {
                recognitionConfig.SetDatabase(Form.FaceData);
                moduleConfiguration.ApplyChanges();
            }
            #endregion

            #region Prepare to start
            moduleConfiguration.EnableAllAlerts();
            //moduleConfiguration.SubscribeAlert(FaceAlertHandler);

            pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();
            Form.SetStatus("RealSenseManager初始化中");
            if (applyChangesStatus.IsError() || realSenseManager.Init().IsError())
            {
                MessageBox.Show(
                    "RealSenseManager初始化失敗,請檢查設定正確。",
                    "初始化失敗",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                OnStop?.Invoke(this, null);
                return;
            }
            #endregion

            using (moduleOutput = faceModule.CreateOutput()) {
                PXCMCapture.Device.StreamProfileSet profiles;
                PXCMCapture.Device device = captureManager.QueryDevice();

                if (device == null)
                {
                    MessageBox.Show(
                        "取得設備失敗。",
                        "初始化失敗",
                        MessageBoxButtons.OK,
                        MessageBoxIcon.Error);
                    OnStop?.Invoke(this, null);
                    return;
                }

                device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);

                #region Loop
                while (!_Stop)
                {
                    while (_Paush)
                    {
                        Application.DoEvents();
                    }
                    if (realSenseManager.AcquireFrame(true).IsError())
                    {
                        break;
                    }
                    var isConnected = realSenseManager.IsConnected();
                    if (isConnected)
                    {
                        var sample = realSenseManager.QueryFaceSample();
                        if (sample == null)
                        {
                            realSenseManager.ReleaseFrame();
                            continue;
                        }
                        #region Extract the frame image
                        PXCMImage image = null;
                        if (Form.ModeType == PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR)
                        {
                            image = sample.ir;
                        }
                        else
                        {
                            image = sample.color;
                        }
                        #endregion

                        moduleOutput.Update();//update recognition results
                        PXCMFaceConfiguration.RecognitionConfiguration recognition = moduleConfiguration.QueryRecognition();
                        if (recognition == null)
                        {
                            realSenseManager.ReleaseFrame();
                            continue;
                        }


                        #region Drawing and events
                        OnFrame?.Invoke(this, new FaceRecognitionEventArgs()
                        {
                            Image = ToBitmap(image)
                        });
                        FindFace(moduleOutput);
                        #endregion
                    }
                    //Release the frame
                    realSenseManager.ReleaseFrame();
                }
                #endregion

                //Update the database buffer
                //Buffer = moduleOutput.QueryRecognitionModule().GetDatabaseBuffer();
            }

            #region Release resources
            moduleConfiguration.Dispose();
            realSenseManager.Close();
            realSenseManager.Dispose();
            #endregion

            IsDispose = true;
            OnStop?.Invoke(this, null);
        }
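
FaceTrackingPipeline above passes each frame through a ToBitmap helper before raising OnFrame. A sketch of such a conversion under the usual AcquireAccess / ImageData.ToBitmap / ReleaseAccess pattern is shown below; the helper itself is not part of this listing, so treat its exact shape as an assumption.

        // Assumed sketch of the ToBitmap(PXCMImage) helper used in the OnFrame event above:
        // read the frame as RGB32 and wrap it in a System.Drawing.Bitmap.
        private System.Drawing.Bitmap ToBitmap(PXCMImage image)
        {
            PXCMImage.ImageData data;
            if (image.AcquireAccess(
                    PXCMImage.Access.ACCESS_READ,
                    PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32,
                    out data).IsError())
            {
                return null;
            }

            System.Drawing.Bitmap bitmap = data.ToBitmap(0, image.info.width, image.info.height);
            image.ReleaseAccess(data);
            return bitmap;
        }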
Example #6
        public void SimplePipeline()
        {
            PXCMSenseManager pp = m_form.Session.CreateSenseManager();

            if (pp == null)
            {
                throw new Exception("PXCMSenseManager null");
            }

            // Set Source & Landmark Profile Index
            PXCMCapture.DeviceInfo info;
            if (m_form.GetRecordState())
            {
                pp.captureManager.SetFileName(m_form.GetFileName(), true);
                if (m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out info))
                {
                    pp.captureManager.FilterByDeviceInfo(info);
                }
            }
            else if (m_form.GetPlaybackState())
            {
                pp.captureManager.SetFileName(m_form.GetFileName(), false);
                PXCMCaptureManager cmanager = pp.QueryCaptureManager();
                if (cmanager == null)
                {
                    throw new Exception("PXCMCaptureManager null");
                }
                cmanager.SetRealtime(false);
            }
            else
            {
                if (m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out info))
                {
                    pp.captureManager.FilterByDeviceInfo(info);
                    Tuple <PXCMImage.ImageInfo, PXCMRangeF32> selectedRes = m_form.GetCheckedColorResolution();
                    var set = new PXCMCapture.Device.StreamProfileSet();
                    set.color.frameRate        = selectedRes.Item2;
                    set.color.imageInfo.format = selectedRes.Item1.format;
                    set.color.imageInfo.width  = selectedRes.Item1.width;
                    set.color.imageInfo.height = selectedRes.Item1.height;
                    pp.captureManager.FilterByStreamProfiles(set);
                }
            }

            // Set Module
            pp.EnableFace();
            PXCMFaceModule faceModule = pp.QueryFace();

            if (faceModule == null)
            {
                Debug.Assert(faceModule != null);
                return;
            }

            PXCMFaceConfiguration moduleConfiguration = faceModule.CreateActiveConfiguration();

            if (moduleConfiguration == null)
            {
                Debug.Assert(moduleConfiguration != null);
                return;
            }

            PXCMFaceConfiguration.TrackingModeType mode = m_form.GetCheckedProfile().Contains("3D")
                ? PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH
                : PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR;

            moduleConfiguration.SetTrackingMode(mode);

            moduleConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;

            moduleConfiguration.detection.maxTrackedFaces = m_form.NumDetection;
            moduleConfiguration.landmarks.maxTrackedFaces = m_form.NumLandmarks;
            moduleConfiguration.pose.maxTrackedFaces      = m_form.NumPose;
            PXCMFaceConfiguration.ExpressionsConfiguration econfiguration = moduleConfiguration.QueryExpressions();
            if (econfiguration == null)
            {
                throw new Exception("ExpressionsConfiguration null");
            }
            econfiguration.properties.maxTrackedFaces = m_form.NumExpressions;

            econfiguration.EnableAllExpressions();
            moduleConfiguration.detection.isEnabled = m_form.IsDetectionEnabled();
            moduleConfiguration.landmarks.isEnabled = m_form.IsLandmarksEnabled();
            moduleConfiguration.pose.isEnabled      = m_form.IsPoseEnabled();
            if (m_form.IsExpressionsEnabled())
            {
                econfiguration.Enable();
            }

            PXCMFaceConfiguration.RecognitionConfiguration qrecognition = moduleConfiguration.QueryRecognition();
            if (qrecognition == null)
            {
                throw new Exception("PXCMFaceConfiguration.RecognitionConfiguration null");
            }
            if (m_form.IsRecognitionChecked())
            {
                qrecognition.Enable();
            }

            moduleConfiguration.EnableAllAlerts();
            moduleConfiguration.SubscribeAlert(FaceAlertHandler);

            pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();

            m_form.UpdateStatus("Init Started", MainForm.Label.StatusLabel);

            if (applyChangesStatus < pxcmStatus.PXCM_STATUS_NO_ERROR || pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                m_form.UpdateStatus("Init Failed", MainForm.Label.StatusLabel);
            }
            else
            {
                using (PXCMFaceData moduleOutput = faceModule.CreateOutput())
                {
                    Debug.Assert(moduleOutput != null);
                    PXCMCapture.Device.StreamProfileSet profiles;

                    PXCMCaptureManager cmanager = pp.QueryCaptureManager();
                    if (cmanager == null)
                    {
                        throw new Exception("capture manager null");
                    }
                    PXCMCapture.Device device = cmanager.QueryDevice();

                    if (device == null)
                    {
                        throw new Exception("device null");
                    }

                    device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);
                    CheckForDepthStream(profiles, faceModule);

                    ushort threshold      = device.QueryDepthConfidenceThreshold();
                    int    filter_option  = device.QueryIVCAMFilterOption();
                    int    range_tradeoff = device.QueryIVCAMMotionRangeTradeOff();

                    device.SetDepthConfidenceThreshold(1);
                    device.SetIVCAMFilterOption(6);
                    device.SetIVCAMMotionRangeTradeOff(21);

                    if (m_form.IsMirrored())
                    {
                        device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
                    }
                    else
                    {
                        device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_DISABLED);
                    }

                    m_form.UpdateStatus("Streaming", MainForm.Label.StatusLabel);
                    m_timer = new FPSTimer(m_form);

                    while (!m_form.Stopped)
                    {
                        if (pp.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            break;
                        }
                        bool isConnected = pp.IsConnected();
                        DisplayDeviceConnection(isConnected);
                        if (isConnected)
                        {
                            PXCMCapture.Sample sample = pp.QueryFaceSample();
                            if (sample == null)
                            {
                                pp.ReleaseFrame();
                                continue;
                            }
                            DisplayPicture(sample.color);

                            moduleOutput.Update();
                            if (moduleConfiguration.QueryRecognition().properties.isEnabled)
                            {
                                UpdateRecognition(moduleOutput);
                            }

                            m_form.DrawGraphics(moduleOutput);
                            m_form.UpdatePanel();
                        }
                        pp.ReleaseFrame();
                    }

                    device.SetDepthConfidenceThreshold(threshold);
                    device.SetIVCAMFilterOption(filter_option);
                    device.SetIVCAMMotionRangeTradeOff(range_tradeoff);
                }

                moduleConfiguration.UnsubscribeAlert(FaceAlertHandler);
                moduleConfiguration.ApplyChanges();
                m_form.UpdateStatus("Stopped", MainForm.Label.StatusLabel);
            }
            moduleConfiguration.Dispose();
            pp.Close();
            pp.Dispose();
        }
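
SimplePipeline above calls UpdateRecognition(moduleOutput) once per frame when recognition is enabled, but the method itself is not part of this listing. The sketch below shows a typical flow over PXCMFaceData; the user-ID handling and status message are illustrative assumptions.

        // Assumed sketch of UpdateRecognition: walk the detected faces and either read
        // the registered user ID or register the face in the recognition database.
        private void UpdateRecognition(PXCMFaceData faceData)
        {
            for (int i = 0; i < faceData.QueryNumberOfDetectedFaces(); i++)
            {
                PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);
                if (face == null)
                {
                    continue;
                }

                PXCMFaceData.RecognitionData recognitionData = face.QueryRecognition();
                if (recognitionData == null)
                {
                    continue;
                }

                if (recognitionData.IsRegistered())
                {
                    int userId = recognitionData.QueryUserID();   // previously registered face
                    m_form.UpdateStatus("Recognized user " + userId, MainForm.Label.StatusLabel);
                }
                else
                {
                    recognitionData.RegisterUser();               // add this face to the database
                }
            }
        }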
Example #7
        // Configure the face module parameters: enable Landmark and Expression detection; the pulse and face recognition modules are left disabled
        private void InitFaceState()
        {
            PXCMFaceConfiguration faceCfg = faceModule.CreateActiveConfiguration();

            if (faceCfg == null)
            {
#if DEBUG
                System.Windows.Forms.MessageBox.Show("faceCfg failed");
#endif
                return;
            }

            faceCfg.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
            faceCfg.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_CLOSEST_TO_FARTHEST;
            // Single-person tracking
            faceCfg.detection.maxTrackedFaces = NUM_PERSONS;
            faceCfg.landmarks.maxTrackedFaces = NUM_PERSONS;
            faceCfg.pose.maxTrackedFaces      = NUM_PERSONS;

            // Expression initialization
            PXCMFaceConfiguration.ExpressionsConfiguration expressionCfg = faceCfg.QueryExpressions();
            if (expressionCfg == null)
            {
                throw new Exception("ExpressionsConfiguration null");
            }
            expressionCfg.properties.maxTrackedFaces = NUM_PERSONS;

            expressionCfg.EnableAllExpressions();
            faceCfg.detection.isEnabled = true;
            faceCfg.landmarks.isEnabled = true;
            faceCfg.pose.isEnabled      = true;
            if (expressionCfg != null)
            {
                expressionCfg.Enable();
            }

            //Pulse initialization (this block is intentionally disabled via if (false))
            if (false)
            {
                PXCMFaceConfiguration.PulseConfiguration pulseConfiguration = faceCfg.QueryPulse();
                if (pulseConfiguration == null)
                {
                    throw new Exception("pulseConfiguration null");
                }

                pulseConfiguration.properties.maxTrackedFaces = NUM_PERSONS;
                if (pulseConfiguration != null)
                {
                    pulseConfiguration.Enable();
                }
            }

            // Face recognition initialization (also intentionally disabled)
            if (false)
            {
                PXCMFaceConfiguration.RecognitionConfiguration qrecognition = faceCfg.QueryRecognition();
                if (qrecognition == null)
                {
                    throw new Exception("PXCMFaceConfiguration.RecognitionConfiguration null");
                }
                else
                {
                    qrecognition.Enable();
                }
            }

            faceCfg.ApplyChanges();
        }
Example #8
        public void SimplePipeline()
        {
            PXCMSenseManager pp = m_form.Session.CreateSenseManager();

            if (pp == null)
            {
                throw new Exception("PXCMSenseManager null");
            }

            PXCMCaptureManager captureMgr = pp.captureManager;

            if (captureMgr == null)
            {
                throw new Exception("PXCMCaptureManager null");
            }

            var selectedRes = m_form.GetCheckedColorResolution();

            if (selectedRes != null && !m_form.IsInPlaybackState())
            {
                // Set active camera
                PXCMCapture.DeviceInfo deviceInfo;
                m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out deviceInfo);
                captureMgr.FilterByDeviceInfo(m_form.GetCheckedDeviceInfo());

                // activate filter only live/record mode , no need in playback mode
                var set = new PXCMCapture.Device.StreamProfileSet
                {
                    color =
                    {
                        frameRate = selectedRes.Item2,
                        imageInfo =
                        {
                            format = selectedRes.Item1.format,
                            height = selectedRes.Item1.height,
                            width  = selectedRes.Item1.width
                        }
                    }
                };

                if (m_form.IsPulseEnabled() && (set.color.imageInfo.width < 1280 || set.color.imageInfo.height < 720))
                {
                    captureMgr.FilterByStreamProfiles(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720, 0);
                }
                else
                {
                    captureMgr.FilterByStreamProfiles(set);
                }
            }

            // Set Source & Landmark Profile Index
            if (m_form.IsInPlaybackState())
            {
                //pp.captureManager.FilterByStreamProfiles(null);
                captureMgr.SetFileName(m_form.GetFileName(), false);
                captureMgr.SetRealtime(false);
            }
            else if (m_form.GetRecordState())
            {
                captureMgr.SetFileName(m_form.GetFileName(), true);
            }

            // Set Module
            pp.EnableFace();
            PXCMFaceModule faceModule = pp.QueryFace();

            if (faceModule == null)
            {
                Debug.Assert(faceModule != null);
                return;
            }

            PXCMFaceConfiguration moduleConfiguration = faceModule.CreateActiveConfiguration();

            if (moduleConfiguration == null)
            {
                Debug.Assert(moduleConfiguration != null);
                return;
            }

            var checkedProfile = m_form.GetCheckedProfile();
            var mode           = m_form.FaceModesMap.First(x => x.Value == checkedProfile).Key;

            moduleConfiguration.SetTrackingMode(mode);

            moduleConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;

            moduleConfiguration.detection.maxTrackedFaces = m_form.NumDetection;
            moduleConfiguration.landmarks.maxTrackedFaces = m_form.NumLandmarks;
            moduleConfiguration.pose.maxTrackedFaces      = m_form.NumPose;

            PXCMFaceConfiguration.ExpressionsConfiguration econfiguration = moduleConfiguration.QueryExpressions();
            if (econfiguration == null)
            {
                throw new Exception("ExpressionsConfiguration null");
            }
            econfiguration.properties.maxTrackedFaces = m_form.NumExpressions;

            econfiguration.EnableAllExpressions();
            moduleConfiguration.detection.isEnabled = m_form.IsDetectionEnabled();
            moduleConfiguration.landmarks.isEnabled = m_form.IsLandmarksEnabled();
            moduleConfiguration.pose.isEnabled      = m_form.IsPoseEnabled();
            if (m_form.IsExpressionsEnabled())
            {
                econfiguration.Enable();
            }

            PXCMFaceConfiguration.PulseConfiguration pulseConfiguration = moduleConfiguration.QueryPulse();
            if (pulseConfiguration == null)
            {
                throw new Exception("pulseConfiguration null");
            }

            pulseConfiguration.properties.maxTrackedFaces = m_form.NumPulse;
            if (m_form.IsPulseEnabled())
            {
                pulseConfiguration.Enable();
            }

            qrecognition = moduleConfiguration.QueryRecognition();
            if (qrecognition == null)
            {
                throw new Exception("PXCMFaceConfiguration.RecognitionConfiguration null");
            }
            if (m_form.IsRecognitionChecked())
            {
                qrecognition.Enable();


                #region Load the face recognition database
                if (File.Exists(DatabasePath))
                {
                    m_form.UpdateStatus("正在讀取資料庫", MainForm.Label.StatusLabel);
                    List <RecognitionFaceData> faceData = null;
                    FaceDatabaseFile.Load(DatabasePath, ref faceData, ref NameMapping);
                    FaceData = faceData.ToArray();
                    qrecognition.SetDatabase(FaceData);
                }
                #endregion
            }



            moduleConfiguration.EnableAllAlerts();
            moduleConfiguration.SubscribeAlert(FaceAlertHandler);

            pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();

            m_form.UpdateStatus("Init Started", MainForm.Label.StatusLabel);

            if (applyChangesStatus < pxcmStatus.PXCM_STATUS_NO_ERROR || pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                m_form.UpdateStatus("Init Failed", MainForm.Label.StatusLabel);
            }
            else
            {
                using (PXCMFaceData moduleOutput = faceModule.CreateOutput())
                {
                    Debug.Assert(moduleOutput != null);
                    PXCMCapture.Device.StreamProfileSet profiles;
                    PXCMCapture.Device device = captureMgr.QueryDevice();

                    if (device == null)
                    {
                        throw new Exception("device null");
                    }

                    device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);
                    CheckForDepthStream(profiles, faceModule);

                    m_form.UpdateStatus("Streaming", MainForm.Label.StatusLabel);
                    m_timer = new FPSTimer(m_form);

                    #region loop
                    while (!m_form.Stopped)
                    {
                        if (pp.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            break;
                        }
                        var isConnected = pp.IsConnected();
                        DisplayDeviceConnection(isConnected);
                        if (isConnected)
                        {
                            var sample = pp.QueryFaceSample();
                            if (sample == null)
                            {
                                pp.ReleaseFrame();
                                continue;
                            }
                            switch (mode)
                            {
                            case PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR:
                                if (sample.ir != null)
                                {
                                    DisplayPicture(sample.ir);
                                }
                                break;

                            default:
                                DisplayPicture(sample.color);
                                break;
                            }

                            moduleOutput.Update();
                            PXCMFaceConfiguration.RecognitionConfiguration recognition = moduleConfiguration.QueryRecognition();
                            if (recognition == null)
                            {
                                pp.ReleaseFrame();
                                continue;
                            }

                            if (recognition.properties.isEnabled)
                            {
                                UpdateRecognition(moduleOutput);
                            }

                            m_form.DrawGraphics(moduleOutput);
                            m_form.UpdatePanel();
                        }
                        pp.ReleaseFrame();
                    }
                    #endregion
                }

                //             moduleConfiguration.UnsubscribeAlert(FaceAlertHandler);
                //             moduleConfiguration.ApplyChanges();
                m_form.UpdateStatus("Stopped", MainForm.Label.StatusLabel);
            }

            #region Save the face recognition data file
            if (DatabaseChanged)
            {
                FaceDatabaseFile.Save(DatabasePath, FaceData.ToList(), NameMapping);
            }
            #endregion

            var dbm = new FaceDatabaseManager(pp);


            moduleConfiguration.Dispose();
            pp.Close();
            pp.Dispose();
        }
Example #9
        private void ConfigureRealSense()
        {
            PXCMFaceModule faceModule;



            // Start the SenseManager and session
            senseManager = PXCMSenseManager.CreateInstance();
            senseManager.captureManager.SetFileName("recorded_video.wm", true);
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 0);

            senseManager.Init();

            senseManager.captureManager.SetRealtime(false);
            senseManager.captureManager.SetPause(true);

            // Enable the color stream
            //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60);
            // 60 or 0 for fps?

            // Enable the face module
            senseManager.EnableFace();
            faceModule = senseManager.QueryFace();
            faceConfig = faceModule.CreateActiveConfiguration();
            faceConfig.detection.isEnabled = true;
            expressionConfig = faceConfig.QueryExpressions();
            expressionConfig.Enable();
            expressionConfig.EnableAllExpressions();
            faceConfig.EnableAllAlerts();



            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            // Enable facial recognition
            recognitionConfig = faceConfig.QueryRecognition();
            recognitionConfig.Enable();


            // Create a recognition database
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            recognitionDesc.maxUsers = DatabaseUsers;
            recognitionConfig.CreateStorage(DatabaseName, out recognitionDesc);
            recognitionConfig.UseStorage(DatabaseName);
            LoadDatabaseFromFile();
            recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Apply changes and initialize
            faceConfig.ApplyChanges();
            senseManager.Init();
            faceData = faceModule.CreateOutput();
            int numFaces = faceData.QueryNumberOfDetectedFaces();

            Console.WriteLine("Number of detected faces: {0}", numFaces);

            // Mirror image
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
Example #10
        private void InitializeFace()
        {
            // Enable face detection
            var sts = senceManager.EnableFace();

            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("顔検出の有効化に失敗しました");
            }

            //Create the face detector
            var faceModule = senceManager.QueryFace();

            //Get the face detection configuration
            PXCMFaceConfiguration config = faceModule.CreateActiveConfiguration();

            config.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            config.ApplyChanges();

            //Added: get the face recognition configuration
            PXCMFaceConfiguration.RecognitionConfiguration rcfg = config.QueryRecognition();

            //Added: enable face recognition
            rcfg.Enable();

            //Added: prepare the face recognition database
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc desc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            desc.maxUsers = 10;
            rcfg.CreateStorage("MyDB", out desc);
            rcfg.UseStorage("MyDB");

            //Added: configure the recognition registration mode
            rcfg.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

            // Initialize the pipeline
            pxcmStatus ret = senceManager.Init();

            if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("初期化に失敗しました");
            }

            // Get the device information
            PXCMCapture.Device device = senceManager.QueryCaptureManager().QueryDevice();
            if (device == null)
            {
                throw new Exception("deviceの生成に失敗しました");
            }


            // Enable mirror display
            device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            PXCMCapture.DeviceInfo deviceInfo;
            device.QueryDeviceInfo(out deviceInfo);
            if (deviceInfo.model == PXCMCapture.DeviceModel.DEVICE_MODEL_IVCAM)
            {
                device.SetDepthConfidenceThreshold(1);
                device.SetIVCAMFilterOption(6);
                device.SetIVCAMMotionRangeTradeOff(21);
            }


            config.detection.isEnabled       = true;
            config.detection.maxTrackedFaces = DETECTION_MAXFACES;
            config.QueryRecognition().Enable();
            config.ApplyChanges();

            faceData = faceModule.CreateOutput();
        }
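
InitializeFace above only configures the pipeline; the faceData instance it creates is consumed in a per-frame update loop elsewhere in that sample. A minimal, assumed sketch of such a loop over the same fields (senceManager, faceData) follows.

        // Illustrative frame loop for the configuration above (not part of the original sample):
        // acquire a frame, refresh the face data, inspect the faces, then release the frame.
        private void UpdateFaceFrame()
        {
            while (true)
            {
                if (senceManager.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                faceData.Update();

                for (int i = 0; i < faceData.QueryNumberOfDetectedFaces(); i++)
                {
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);
                    // ... read detection / recognition results for this face here ...
                }

                senceManager.ReleaseFrame();
            }
        }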
        //---------------------------------------------------ConfigureRealSense-----------------------------------------------------------------------------------------------------



        public static void ConfigureRealSense() //Configures the pipeline for processing
        {
            /* Start the SenseManager and session
             * Creates the PXCMSenseManager instance in senseManager to
             * interface with predefined algorithms, e.g. facial recognition
             */
            senseManager = PXCMSenseManager.CreateInstance(); //private PXCMSenseManager senseManager;

            /* Enable the color stream
             * Enables the color stream transmission
             */
            ///////senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);

            /* Enable the face module
             * Activates face tracking in the pipeline
             */
            senseManager.EnableFace();
            PXCMFaceModule faceModule = senseManager.QueryFace(); //Returns the face module instance (FaceModule)

            /* The CreateActiveConfiguration function returns a new FaceConfiguration instance,
             * which the application uses to configure the face tracking module.
             * The application must release the instance after use.
             */
            PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();

            // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
            faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

            /* Enable facial recognition
             * Creates a configuration interface for the facial recognition algorithm.
             * This algorithm already provides the CRUD operations; they only need to be configured.
             */
            recognitionConfig = faceConfig.QueryRecognition(); //private PXCMFaceConfiguration.RecognitionConfiguration recognitionConfig;
            recognitionConfig.Enable();                        //faceModule.CreateActiveConfiguration().QueryRecognition().Enable();

            /* Create a recognition database
             * The 'RecognitionStorageDesc' structure describes the recognition database configuration parameters.
             */
            PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc storageDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
            storageDesc.maxUsers = DatabaseUsers;                           //Maximum number of users supported by the database
            recognitionConfig.CreateStorage(DatabaseName, out storageDesc); //Creates the database using the description configured in 'storageDesc'
            recognitionConfig.UseStorage(DatabaseName);                     //Puts the database into use

            /* Checks whether the database file exists and, if it does, calls 'SetDatabaseBuffer'
             * to replace the recognition database
             */
            LoadDatabaseFromFile();

            /* REGISTRATION_MODE_CONTINUOUS registers users automatically.
             * REGISTRATION_MODE_ON_DEMAND  registers users on request.
             */
            recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);


            faceConfig.ApplyChanges(); // Applies the settings made to the facial recognition module

            /* The 'Init' function initializes the pipeline
             * (this sets up the processing pipeline that receives and processes the hardware data)
             */
            senseManager.Init();

            /* The 'CreateOutput' function returns a new 'FaceData' instance, which the application uses to store and retrieve
             *  the output data of the face tracking module.
             *  The application must release the instance after use.
             */
            faceData = faceModule.CreateOutput();

            /* The 'SetMirrorMode' function sets the orientation of the camera images.
             */
            senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

            // Release resources
            faceConfig.Dispose();
            faceModule.Dispose();
        }
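
The comments above describe LoadDatabaseFromFile as reading a saved buffer and handing it to SetDatabaseBuffer; the complementary save step is not shown in this listing. The sketch below illustrates how the recognition database buffer is typically exported from PXCMFaceData; the method name, the DatabaseFilename field, and the exact buffer-query calls are assumptions.

        // Hypothetical counterpart of LoadDatabaseFromFile: export the current recognition
        // database so a later run can restore it with SetDatabaseBuffer.
        public static void SaveDatabaseToFile()
        {
            PXCMFaceData.RecognitionModuleData recognitionModule = faceData.QueryRecognitionModule();
            if (recognitionModule == null)
            {
                return;
            }

            byte[] buffer = new byte[recognitionModule.QueryDatabaseSize()];
            recognitionModule.QueryDatabaseBuffer(buffer);

            System.IO.File.WriteAllBytes(DatabaseFilename, buffer); // DatabaseFilename: assumed path field
        }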
Example #12
        public void SimplePipeline()
        {
            PXCMSenseManager pp = m_form.Session.CreateSenseManager();

            if (pp == null)
            {
                throw new Exception("PXCMSenseManager null");
            }

            PXCMCaptureManager captureMgr = pp.captureManager;

            if (captureMgr == null)
            {
                throw new Exception("PXCMCaptureManager null");
            }

            var selectedRes = m_form.GetCheckedColorResolution();

            if (selectedRes != null)
            {
                // Set active camera
                PXCMCapture.DeviceInfo deviceInfo;
                m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out deviceInfo);
                captureMgr.FilterByDeviceInfo(m_form.GetCheckedDeviceInfo());

                // activate filter only live/record mode , no need in playback mode
                var set = new PXCMCapture.Device.StreamProfileSet
                {
                    color =
                    {
                        frameRate = selectedRes.Item2,
                        imageInfo =
                        {
                            format = selectedRes.Item1.format,
                            height = selectedRes.Item1.height,
                            width  = selectedRes.Item1.width
                        }
                    }
                };
            }

            // Set Module
            pp.EnableFace();
            PXCMFaceModule faceModule = pp.QueryFace();

            if (faceModule == null)
            {
                Debug.Assert(faceModule != null);
                return;
            }

            PXCMFaceConfiguration moduleConfiguration = faceModule.CreateActiveConfiguration();

            if (moduleConfiguration == null)
            {
                Debug.Assert(moduleConfiguration != null);
                return;
            }

            var checkedProfile = m_form.GetCheckedProfile();
            var mode           = m_form.FaceModesMap.First(x => x.Value == checkedProfile).Key;

            moduleConfiguration.SetTrackingMode(mode);

            moduleConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;

            PXCMFaceConfiguration.ExpressionsConfiguration econfiguration = moduleConfiguration.QueryExpressions();
            if (econfiguration == null)
            {
                throw new Exception("ExpressionsConfiguration null");
            }
            econfiguration.properties.maxTrackedFaces = 4; //maximum number of faces tracked for expressions

            econfiguration.EnableAllExpressions();
            //if (m_form.IsExpressionsEnabled())
            //{
            econfiguration.Enable(); //force-enable Expressions
            //}
            moduleConfiguration.EnableAllAlerts();
            moduleConfiguration.SubscribeAlert(FaceAlertHandler);

            pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();

            m_form.UpdateStatus("Init Started", MainForm.Label.StatusLabel);
            m_form.UpdateStatus("カメラ起動中", MainForm.Label.ReportLabel);

            if (applyChangesStatus < pxcmStatus.PXCM_STATUS_NO_ERROR || pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                m_form.UpdateStatus("Init Failed", MainForm.Label.StatusLabel);
                m_form.UpdateStatus("設定エラー:メニューから適切な設定をしてください。", MainForm.Label.ReportLabel);
            }
            else
            {
                using (PXCMFaceData moduleOutput = faceModule.CreateOutput())
                {
                    Debug.Assert(moduleOutput != null);
                    PXCMCapture.Device.StreamProfileSet profiles;
                    PXCMCapture.Device device = captureMgr.QueryDevice();

                    if (device == null)
                    {
                        m_form.UpdateStatus("カメラを接続してください。", MainForm.Label.ReportLabel);
                        throw new Exception("device null");
                    }

                    device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);
                    CheckForDepthStream(profiles, faceModule);

                    m_form.UpdateStatus("Streaming", MainForm.Label.StatusLabel);
                    m_timer = new FPSTimer(m_form);

                    while (!m_form.Stopped)
                    {
                        if (pp.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            break;
                        }
                        var isConnected = pp.IsConnected();
                        DisplayDeviceConnection(isConnected);
                        if (isConnected)
                        {
                            var sample = pp.QueryFaceSample();
                            if (sample == null)
                            {
                                pp.ReleaseFrame();
                                continue;
                            }
                            switch (mode)
                            {
                            case PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR:
                                if (sample.ir != null)
                                {
                                    DisplayPicture(sample.ir);
                                }
                                break;

                            default:
                                DisplayPicture(sample.color);
                                break;
                            }

                            moduleOutput.Update();
                            PXCMFaceConfiguration.RecognitionConfiguration recognition = moduleConfiguration.QueryRecognition();
                            if (recognition == null)
                            {
                                pp.ReleaseFrame();
                                continue;
                            }

                            m_form.DrawGraphics(moduleOutput);
                            m_form.UpdatePanel();
                        }
                        pp.ReleaseFrame();
                    }
                }

                //             moduleConfiguration.UnsubscribeAlert(FaceAlertHandler);
                //             moduleConfiguration.ApplyChanges();
                m_form.UpdateStatus("Stopped", MainForm.Label.StatusLabel);
            }
            moduleConfiguration.Dispose();
            pp.Close();
            pp.Dispose();
        }
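
Example #12 force-enables all expressions but the listing ends before the results are consumed. As an illustration of what EnableAllExpressions makes available, the sketch below reads the smile intensity for each tracked face; the helper name and the status message are assumptions, not part of the original sample.

        // Illustrative helper (not part of the original sample): query the smile intensity
        // (0-100) for each detected face from the expressions data enabled above.
        private void ReadExpressions(PXCMFaceData moduleOutput)
        {
            for (int i = 0; i < moduleOutput.QueryNumberOfDetectedFaces(); i++)
            {
                PXCMFaceData.Face face = moduleOutput.QueryFaceByIndex(i);
                if (face == null)
                {
                    continue;
                }

                PXCMFaceData.ExpressionsData expressions = face.QueryExpressions();
                if (expressions == null)
                {
                    continue;
                }

                PXCMFaceData.ExpressionsData.FaceExpressionResult result;
                if (expressions.QueryExpression(
                        PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out result))
                {
                    m_form.UpdateStatus("Smile intensity: " + result.intensity, MainForm.Label.ReportLabel);
                }
            }
        }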