/// <summary>
/// Creates the RealSense session/manager, enables the color, depth and IR
/// streams plus hand tracking, applies the hand-module configuration and
/// starts the background update thread.
/// </summary>
private void startCamera()
{
    session = PXCMSession.CreateInstance();
    manager = session.CreateSenseManager();
    if (manager == null)
    {
        // Bug fix: the original only logged and then fell through,
        // dereferencing the null manager below. Abort instead.
        Console.WriteLine("Failed");
        return;
    }

    // Color at 1080p30; depth and IR at VGA60.
    manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080, 30);
    manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480, 60);
    manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, 640, 480, 60);

    // Hand tracking: alerts, segmentation image, joints, navigation gestures.
    manager.EnableHand();
    PXCMHandConfiguration config = manager.QueryHand().CreateActiveConfiguration();
    config.EnableAllAlerts();
    config.EnableSegmentationImage(true);
    config.EnableTrackedJoints(true);
    config.LoadGesturePack("navigation");
    config.EnableAllGestures(true);
    config.ApplyChanges();
    config.Dispose();

    // Bring the pipeline up, then pump frames on a worker thread.
    manager.Init();
    thread = new Thread(new ThreadStart(updateThread));
    thread.Start();
}
//RealSense methods -------------------------------------------------------------------
/// <summary>
/// Initializes the RealSense pipeline: color and depth streams plus hand
/// tracking, then mirrors the image and prepares the projection object.
/// </summary>
/// <returns>true when every step succeeded; false otherwise (error logged to console).</returns>
private bool InitializeRealSense()
{
    try
    {
        // Create the SenseManager that drives the whole pipeline.
        senseManager = PXCMSenseManager.CreateInstance();

        // Request the color stream.
        var status = senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR,
            COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS);
        if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("Colorストリームの有効化に失敗しました");
        }

        // Request the depth stream.
        status = senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH,
            DEPTH_WIDTH, DEPTH_HEIGHT, DEPTH_FPS);
        if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("Depthストリームの有効化に失敗しました");
        }

        // Turn on hand tracking.
        status = senseManager.EnableHand();
        if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("手の検出の有効化に失敗しました");
        }

        // Initialize the pipeline. Module instances only exist after a
        // successful Init(), so per-module configuration happens afterwards.
        status = senseManager.Init();
        if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("パイプラインの初期化に失敗しました");
        }

        // Mirror the camera image horizontally.
        senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(
            PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        // Keep the device and build the coordinate-conversion projection.
        device = senseManager.captureManager.device;
        projection = device.CreateProjection();

        // Set up the hand-tracking data structures.
        InitializeHandTracking();

        return true;
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        return false;
    }
}
/// <summary>
/// Constructor.
///
/// Prepares the WriteableBitmaps used to display camera images and assigns them to the Image controls,
/// obtains the camera-control objects,
/// enables the camera streams and starts frame acquisition,
/// then prepares and starts the Task that captures camera frames.
/// </summary>
public MainWindow()
{
    // Initialize the window components.
    InitializeComponent();

    // Hide the Image control used to display gesture pictures.
    HiddenGestureImage();

    // Prepare the WriteableBitmaps the capture loop writes into and
    // attach them to the Image controls.
    m_ColorWBitmap = new WriteableBitmap(1920, 1080, 96.0, 96.0, PixelFormats.Bgr32, null);
    ColorCameraImage.Source = m_ColorWBitmap;
    m_HandSegmentWBitmap = new WriteableBitmap(640, 480, 96.0, 96.0, PixelFormats.Gray8, null);
    HandSegmentImage.Source = m_HandSegmentWBitmap;
    m_HandJointWBitmap = new WriteableBitmap(640, 480, 96.0, 96.0, PixelFormats.Bgra32, null);
    HandJointtImage.Source = m_HandJointWBitmap;

    // Obtain the camera-control objects.
    m_Session = PXCMSession.CreateInstance();
    m_Cm = m_Session.CreateSenseManager();

    // Enable the Color, Depth and IR streams.
    m_Cm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080, 30);
    m_Cm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480, 30);
    m_Cm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, 640, 480, 30);

    // Enable and configure hand tracking.
    m_Cm.EnableHand();
    PXCMHandModule handModule = m_Cm.QueryHand();
    PXCMHandConfiguration handConfig = handModule.CreateActiveConfiguration();
    handConfig.SetTrackingMode(PXCMHandData.TrackingModeType.TRACKING_MODE_FULL_HAND); // FULL_HAND mode
    handConfig.EnableSegmentationImage(true); // enable the segmentation image
    handConfig.EnableAllGestures(); // capture every gesture
    handConfig.SubscribeGesture(OnFiredGesture); // callback invoked when a gesture fires
    handConfig.ApplyChanges();

    // Create the HandData instance.
    m_HandData = handModule.CreateOutput();

    // Start frame acquisition.
    pxcmStatus initState = m_Cm.Init();
    if (initState < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Initialization failed.
        MessageBox.Show(initState + "\nカメラ初期化に失敗しました。");
        return;
    }

    // Mirror the captured image.
    m_Cm.captureManager.device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

    // Obtain the Projection instance used for coordinate conversion.
    m_Projection =
        m_Cm.QueryCaptureManager().QueryDevice().CreateProjection();

    // Prepare and start the camera-capture task.
    m_CameraCaptureTask = new Task(() => CaptureCameraProcess());
    m_CameraCaptureTask.Start();
}
/// <summary>
/// Click handler: reports the SDK version, captures one synchronous
/// color + depth frame, displays both images and tears the pipeline down.
/// </summary>
private void Button_Click(object sender, RoutedEventArgs e)
{
    // Get instance of SenseManager and show the RealSense version.
    PXCMSession session = PXCMSession.CreateInstance();
    PXCMSession.ImplVersion version = session.QueryVersion();
    textBox1.Text = version.major.ToString() + "." + version.minor.ToString();

    // Set up the pipeline with color and depth streams at 640x480.
    PXCMSenseManager sm = session.CreateSenseManager();
    sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);
    sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);
    sm.Init();

    // Capture one frame synchronously.
    pxcmStatus status = sm.AcquireFrame(true);
    PXCMCapture.Sample sample = sm.QuerySample();

    // Map both samples into CPU-readable bitmaps.
    PXCMImage image = sample.color;
    PXCMImage dimage = sample.depth;
    PXCMImage.ImageData data;
    image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out data);
    WriteableBitmap wbm = data.ToWritableBitmap(0, image.info.width, image.info.height, 96.0, 96.0);
    PXCMImage.ImageData data2;
    dimage.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_RAW, out data2);
    WriteableBitmap wbm2 = data2.ToWritableBitmap(0, dimage.info.width, dimage.info.height, 96.0, 96.0);

    // Display both images.
    imageRGB.Source = wbm;
    imageDepth.Source = wbm2;

    // Clean up. Bug fix: data2 was acquired on the depth image but was
    // released on the color image (image.ReleaseAccess(data2)); each
    // ImageData must be released on the image it was acquired from.
    image.ReleaseAccess(data);
    dimage.ReleaseAccess(data2);
    sm.ReleaseFrame();
    sm.Close();
    session.Dispose();
}
/// <summary>
/// Stores the supplied SenseManager and requests color and depth streams
/// at device defaults (0 lets the SDK pick resolution and frame rate).
/// Throws via ThrowOnFail() if either stream cannot be enabled.
/// </summary>
public void Initialise(PXCMSenseManager senseManager)
{
    this.senseManager = senseManager;

    senseManager
        .EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 0, 0, 0)
        .ThrowOnFail();
    senseManager
        .EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, 0, 0)
        .ThrowOnFail();
}
/// <summary>
/// Builds the RealSense pipeline: color + depth streams and hand tracking,
/// then mirrors the image and prepares the projection helper.
/// Shows a message box and closes the window on failure.
/// </summary>
private void Initialize()
{
    try
    {
        senseManager = PXCMSenseManager.CreateInstance();

        // Color stream.
        if (senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR,
                COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS) < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("カラーストリームの有効化に失敗しました");
        }

        // Depth stream.
        if (senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH,
                DEPTH_WIDTH, DEPTH_HEIGHT, DEPTH_FPS) < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("Depthストリームの有効化に失敗しました");
        }

        // Hand tracking.
        if (senseManager.EnableHand() < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("手の検出の有効化に失敗しました");
        }

        // Bring the pipeline up.
        if (senseManager.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("初期化に失敗しました");
        }

        // Mirror the image and create the projection used for
        // coordinate conversion.
        var device = senseManager.QueryCaptureManager().QueryDevice();
        device.SetMirrorMode(
            PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
        projection = device.CreateProjection();

        // Set up hand tracking.
        InitializeHandTracking();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Creates the SenseManager with a color stream, initializes the pipeline
/// in mirror mode and then starts speech recognition.
/// Shows a message box and closes the window on failure.
/// </summary>
private void Initialize()
{
    try
    {
        // SenseManager + color stream.
        senseManager = PXCMSenseManager.CreateInstance();
        senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR,
                                  COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS);

        // Bring the pipeline up; everything below needs a live pipeline.
        pxcmStatus ret = senseManager.Init();
        if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("初期化に失敗しました");
        }

        // Mirror the camera image horizontally.
        senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(
            PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        // Set up speech recognition.
        InitializeSpeechRecognition();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Creates the SenseManager, enables the depth stream and initializes the
/// pipeline in mirror mode. Throws on any failure.
/// </summary>
private void Initialize()
{
    // Create the SenseManager.
    senseManager = PXCMSenseManager.CreateInstance();

    // Enable the depth stream. (The original comment said "color", but
    // STREAM_TYPE_DEPTH is what is actually requested here.)
    pxcmStatus sts = senseManager.EnableStream(
        PXCMCapture.StreamType.STREAM_TYPE_DEPTH,
        DEPTH_WIDTH, DEPTH_HEIGHT, DEPTH_FPS);
    if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("Depthストリームの有効化に失敗しました");
    }

    // Initialize the pipeline.
    sts = senseManager.Init();
    if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("初期化に失敗しました");
    }

    // Mirror the camera image horizontally.
    senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(
        PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
}
/// <summary>
/// Configures the RealSense pipeline with user segmentation and a color
/// stream, then mirrors the image.
/// </summary>
/// <returns>true when configuration succeeded; false on any exception.</returns>
private bool ConfigureRealSense()
{
    try
    {
        // Session and SenseManager.
        session = PXCMSession.CreateInstance();
        senseManager = session.CreateSenseManager();

        // User segmentation.
        senseManager.Enable3DSeg();

        // Color stream at the configured size, 30 fps.
        senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, WIDTH, HEIGHT, 30);

        // Bring the pipeline up and mirror the image horizontally.
        senseManager.Init();
        senseManager.QueryCaptureManager().QueryDevice()
            .SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        return true;
    }
    catch (Exception)
    {
        return false;
    }
}
/// <summary>
/// Starts the depth stream at 320x240@30. When no camera is present,
/// offers to replay a recorded .rssdk clip instead.
/// </summary>
/// <exception cref="Exception">Thrown when initialization ultimately fails.</exception>
private void InitVideoStream()
{
    senseManager = PXCMSenseManager.CreateInstance();
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 320, 240, 30);

    pxcmStatus initStatus = senseManager.Init();
    if (initStatus == pxcmStatus.PXCM_STATUS_ITEM_UNAVAILABLE)
    {
        // No camera attached — let the user pick a recorded clip instead.
        OpenFileDialog ofd = new OpenFileDialog
        {
            Filter = "RSSDK clip|*.rssdk|All files|*.*",
            CheckFileExists = true,
            CheckPathExists = true
        };
        bool? result = ofd.ShowDialog();
        if (result == true)
        {
            // Replay from file rather than a live device, then retry.
            senseManager.captureManager.SetFileName(ofd.FileName, false);
            initStatus = senseManager.Init();
        }
    }

    if (initStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception(String.Format("Init failed: {0}", initStatus));
    }
}
/// <summary>
/// Creates the SenseManager, lists the available devices, enables the depth
/// stream at device defaults, initializes the pipeline and reads back the
/// device information. Throws on any failure.
/// </summary>
private void Initialize()
{
    senseManager = PXCMSenseManager.CreateInstance();
    if (senseManager == null)
    {
        throw new Exception("SenseManagerを生成できませんでした。");
    }

    // Enumerate the devices that can be used.
    PopulateDevice();

    // Depth stream; 0/0/0 lets the SDK choose resolution and frame rate.
    pxcmStatus status = senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, 0, 0);
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("Depthストリームの有効化に失敗しました");
    }

    // Bring the pipeline up.
    status = senseManager.Init();
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("初期化に失敗しました");
    }

    // Query the device information.
    GetDeviceInfo();
}
/// <summary>
/// Constructor.
///
/// Prepares the WriteableBitmap used to display camera frames and assigns
/// it to the Image control, obtains the camera-control objects, enables
/// the color stream, starts frame acquisition and launches the capture task.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    // Back the Image control with a bitmap the capture loop writes into.
    m_ColorWBitmap = new WriteableBitmap(1920, 1080, 96.0, 96.0, PixelFormats.Bgr32, null);
    ColorCameraImage.Source = m_ColorWBitmap;

    // Camera-control objects.
    m_Session = PXCMSession.CreateInstance();
    m_Cm = m_Session.CreateSenseManager();

    // Color stream: Full HD at 30 fps.
    m_Cm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080, 30);

    // Start frame acquisition.
    pxcmStatus initState = m_Cm.Init();
    if (initState < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Initialization failed.
        MessageBox.Show(initState + "\nカメラ初期化に失敗しました。");
        return;
    }

    // Mirror the captured image.
    m_Cm.captureManager.device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

    // Prepare and start the capture task.
    m_CameraCaptureTask = new Task(() => CaptureCameraProcess());
    m_CameraCaptureTask.Start();
}
/// <summary>
/// Constructor: resets gesture state, configures the RealSense pipeline
/// (color stream + hand module with the "v_sign" gesture and all alerts)
/// and starts the processing thread.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    handWaving = false;
    handTrigger = false;
    msgTimer = 0;

    // Pipeline: color stream plus hand tracking.
    senseManager = PXCMSenseManager.CreateInstance();
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
    senseManager.EnableHand();
    senseManager.Init();

    // Hand-module configuration (done after Init, once the module exists).
    hand = senseManager.QueryHand();
    handConfig = hand.CreateActiveConfiguration();
    handConfig.EnableGesture("v_sign");
    handConfig.EnableAllAlerts();
    handConfig.ApplyChanges();

    // Worker thread that pumps the frames.
    processingThread = new Thread(new ThreadStart(ProcessingThread));
    processingThread.Start();
}
/// <summary>
/// Sets up the RealSense pipeline with a color stream, mirrors the image
/// and initializes speech recognition. Shows the error and closes the
/// window on failure.
/// </summary>
private void Initialize()
{
    try
    {
        // SenseManager with a color stream.
        senseManager = PXCMSenseManager.CreateInstance();
        senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR,
            COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS);

        // Initialize the pipeline.
        var ret = senseManager.Init();
        if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("初期化に失敗しました");
        }

        // Mirror mode.
        senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(
            PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        // Speech recognition.
        InitializeSpeechRecognition();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Click handler: stores the target IP/port from the text boxes, brings up
/// the RealSense pipeline with a 60 fps color stream plus the face module
/// (detection, all expressions, 78 landmarks, all alerts) and starts the
/// capture thread.
/// </summary>
private void startButton_Click(object sender, RoutedEventArgs e)
{
    // Remember the connection target entered by the user.
    CurrentIpAdress = ipTextBox.Text;
    currentPort = portTextBox.Text;

    // Pipeline: color stream + face analysis.
    senseManager = PXCMSenseManager.CreateInstance();
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60);
    senseManager.EnableFace();
    senseManager.Init();

    // Face-module configuration: detection on.
    faceModule = senseManager.QueryFace();
    faceConfiguration = faceModule.CreateActiveConfiguration();
    faceConfiguration.detection.isEnabled = true;

    // All facial expressions.
    expressionConfiguration = faceConfiguration.QueryExpressions();
    expressionConfiguration.Enable();
    expressionConfiguration.EnableAllExpressions();

    // Landmarks (78 points) and every alert.
    faceConfiguration.landmarks.isEnabled = true;
    faceConfiguration.landmarks.numLandmarks = 78;
    faceConfiguration.EnableAllAlerts();
    faceConfiguration.ApplyChanges();

    // Capture thread.
    captureProcess = new Thread(new ThreadStart(CaptureProcess));
    captureProcess.Start();
}
/**
 * Constructor of the model.
 * Prepares everything needed to use the camera: zeroes the tracked emotion
 * counters, enables the Full HD color stream and the face tracker, and
 * applies the face configuration.
 */
public Model()
{
    // Start every tracked emotion at zero (same keys, same order as before).
    foreach (string emotion in new[] { "Anger", "Fear", "Disgust", "Surprise", "Joy", "Sadness", "Contempt" })
    {
        emotions[emotion] = 0;
    }

    width = 1920;
    height = 1080;
    framerate = 30;

    // Pipeline: color stream + face detection.
    senseManager = PXCMSenseManager.CreateInstance();
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, framerate);
    senseManager.EnableFace();
    senseManager.Init();

    // Face configuration: color+depth tracking with detection and pose.
    face = senseManager.QueryFace();
    faceConfig = face.CreateActiveConfiguration();
    faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
    faceConfig.detection.isEnabled = true;
    faceConfig.pose.isEnabled = true;
    faceConfig.ApplyChanges();
    faceConfig.Update();

    modules = new List<RSModule>();
}
/// <summary>
/// Unity Start: creates the SenseManager, enables depth + color streams and
/// hand analysis, initializes the pipeline, configures all gestures, and
/// collects the capsule colliders representing the hands plus the camera.
/// </summary>
void Start()
{
    psm = PXCMSenseManager.CreateInstance();
    if (psm == null)
    {
        Debug.LogError("SenseManager Init Failed");
        return;
    }

    // Request the depth and color streams at 640x480.
    psm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);
    psm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);

    // Turn on hand analysis; clean up and bail out if that fails.
    pxcmStatus result = psm.EnableHand();
    if (result != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Debug.LogError("SenseManager Hand Init Failed");
        OnDisable();
        return;
    }
    handModule = psm.QueryHand();

    // Bring the execution pipeline up.
    result = psm.Init();
    if (result != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Debug.LogError("SenseManager Pipeline Init Failed");
        OnDisable();
        return;
    }

    // Hand output plus gesture configuration.
    handData = handModule.CreateOutput();
    handConfig = handModule.CreateActiveConfiguration();
    handConfig.EnableAllGestures();
    handConfig.ApplyChanges();

    // Collect the capsule colliders that represent the hands.
    foreach (CapsuleCollider capsule in GetComponentsInChildren<CapsuleCollider>())
    {
        hands.Add(capsule.gameObject);
    }

    mainCamera = GetComponentInChildren<Camera>();
}
/// <summary>
/// Device-specific implementation of Connect.
/// Connects the camera.
/// </summary>
/// <remarks>This method is implicitely called by <see cref="Camera.Connect"/> inside a camera lock.</remarks>
/// <seealso cref="Camera.Connect"/>
protected override void ConnectImpl()
{
    // Make sure at least one device is known; rescan once if needed.
    if (deviceInfo.Count == 0)
    {
        ScanForCameras();
    }
    if (deviceInfo.Count == 0)
    {
        log.Error(Name + "No device found.");
        return;
    }

    int deviceIndex = 0;
    ScanForProfiles(deviceIndex);

    /* Create an instance of the PXCSenseManager interface */
    pp = PXCMSenseManager.CreateInstance();
    if (pp == null)
    {
        log.Error(Name + "Failed to create an SDK pipeline object");
        return;
    }

    pp.captureManager.FilterByDeviceInfo(deviceInfo[deviceIndex]);

    //TODO: change this to work with properties
    currentColorProfile = "YUY2 1920x1080x30";
    currentDepthProfile = "DEPTH 640x480x60";
    currentIRProfile = "Y8 640x480x60";

    // Assemble the profile set for the color, depth and IR streams.
    PXCMCapture.Device.StreamProfileSet currentProfileSet = new PXCMCapture.Device.StreamProfileSet();
    currentProfileSet[PXCMCapture.StreamType.STREAM_TYPE_COLOR] = profiles[currentColorProfile];
    currentProfileSet[PXCMCapture.StreamType.STREAM_TYPE_DEPTH] = profiles[currentDepthProfile];
    currentProfileSet[PXCMCapture.StreamType.STREAM_TYPE_IR] = profiles[currentIRProfile];

    /* Set Color & Depth Resolution */
    for (int s = 0; s < PXCMCapture.STREAM_LIMIT; s++)
    {
        PXCMCapture.StreamType st = PXCMCapture.StreamTypeFromIndex(s);
        PXCMCapture.Device.StreamProfile info = currentProfileSet[st];
        // A zero image format marks an unused slot in the profile set.
        if (info.imageInfo.format != 0)
        {
            Single fps = info.frameRate.max;
            pp.EnableStream(st, info.imageInfo.width, info.imageInfo.height, fps);
        }
    }

    // Fix: the original tested for success with an EMPTY then-branch and put
    // the logging in the else; log directly on failure instead.
    if (pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        log.Error(Name + "An error occured.");
    }

    ActivateChannel(ChannelNames.Intensity);
    ActivateChannel(ChannelNames.ZImage);
}
/// <summary>
/// Starts this instance: creates the SenseManager, configures the enabled
/// modules (face/expressions, emotion, color streaming), then initializes
/// the pipeline with a frame callback and begins streaming.
/// </summary>
/// <exception cref="ResearchException">Thrown when the camera is already started.</exception>
public void Start()
{
    if (_sm != null)
    {
        throw new ResearchException("Camera is already started.");
    }

    _sm = PXCMSenseManager.CreateInstance();

    // Configure face detection.
    if (EnableFace)
    {
        _sm.EnableFace();
        var faceModule = _sm.QueryFace();
        using (PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration())
        {
            faceConfig.EnableAllAlerts();
            faceConfig.pose.isEnabled = true;
            faceConfig.pose.maxTrackedFaces = 4;

            if (EnableExpression)
            {
                PXCMFaceConfiguration.ExpressionsConfiguration expression = faceConfig.QueryExpressions();
                expression.Enable();
                expression.EnableAllExpressions();
            }

            // Bug fix: ApplyChanges() used to run only inside the
            // EnableExpression branch, so the alert/pose settings above were
            // silently discarded whenever expressions were disabled.
            faceConfig.ApplyChanges();
        }
    }

    if (EnableEmotion)
    {
        // Configure emotion detection.
        _sm.EnableEmotion();
    }

    if (EnableStreaming)
    {
        // Configure streaming (color only; depth/IR kept for reference).
        _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);
        // _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);
        // _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, 640, 480);
    }

    // Event handler for data callbacks.
    var handler = new PXCMSenseManager.Handler
    {
        onModuleProcessedFrame = OnModuleProcessedFrame
    };

    _sm.Init(handler);

    // GO.
    Debug.WriteLine("{0} Starting streaming.", Time());
    _sm.StreamFrames(false);
    //Debug.WriteLine("{0} End streaming.", Time());
}
/// <summary>
/// Constructor: resets the panel/device selection state, requests Full HD
/// color plus VGA depth streams, initializes the pipeline and stores its
/// session.
/// </summary>
public Snapshot()
{
    // Reset selection state.
    DeviceInfo = null;
    StreamProfileSet = null;
    MainPanel = PXCMCapture.StreamType.STREAM_TYPE_ANY;
    PIPPanel = PXCMCapture.StreamType.STREAM_TYPE_ANY;
    Synced = true;

    // Color at 1080p30, depth at VGA60.
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080, 30);
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480, 60);
    Console.WriteLine(senseManager.captureManager.device);

    senseManager.Init();
    session = senseManager.QuerySession();
}
/// <summary>
/// Creates the SenseManager, enables the color stream, initializes the face
/// module and builds the rectangles / text blocks used to visualize
/// detection results. Shows a message box and closes the window on failure.
/// </summary>
private void Initialize()
{
    try
    {
        senceManager = PXCMSenseManager.CreateInstance();
        if (senceManager == null)
        {
            throw new Exception("SenseManagerの生成に失敗しました");
        }

        // Color stream.
        pxcmStatus sts = senceManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR,
            COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("カラーストリームの取得に失敗しました");
        }

        InitializeFace();

        // Rectangles used to outline each detected face.
        rect = new Rectangle[DETECTION_MAXFACES];
        for (int i = 0; i < DETECTION_MAXFACES; i++)
        {
            rect[i] = new Rectangle();
            TranslateTransform transform = new TranslateTransform(COLOR_WIDTH, COLOR_HEIGHT);
            rect[i].Width = 10;
            rect[i].Height = 10;
            rect[i].Stroke = Brushes.Blue;
            rect[i].StrokeThickness = 3;
            rect[i].RenderTransform = transform;
            CanvasForRect.Children.Add(rect[i]);
        }

        // Text blocks used to show expression values (three per face).
        tb = new TextBlock[EXPRESSION_MAXFACES, 3];
        for (int i = 0; i < EXPRESSION_MAXFACES; i++)
        {
            for (int j = 0; j < 3; j++)
            {
                tb[i, j] = new TextBlock();
                tb[i, j].Width = 200;
                tb[i, j].Height = 27;
                tb[i, j].Foreground = new SolidColorBrush(Colors.Red);
                tb[i, j].FontSize = 20;
                CanvasPoint.Children.Add(tb[i, j]);
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.StackTrace);
        MessageBox.Show("Init:" + ex.Message);
        Close();
    }
}
/// <summary>
/// Configures the RealSense pipeline: color stream, person tracking, face
/// tracking and blob detection; then initializes the SenseManager, mirrors
/// the image and releases the temporary configuration objects.
/// </summary>
private void ConfigureRealSense()
{
    try
    {
        // Create the SenseManager instance
        sm = PXCMSenseManager.CreateInstance();

        // Enable the color stream
        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, ImageWidth, ImageHeight, 30);

        // Enable person tracking (all angles).
        sm.EnablePersonTracking();
        personModule = sm.QueryPersonTracking();
        PXCMPersonTrackingConfiguration personConfig = personModule.QueryConfiguration();
        personConfig.SetTrackedAngles(PXCMPersonTrackingConfiguration.TrackingAngles.TRACKING_ANGLES_ALL);

        // Enable skeleton tracking - not supported on r200?
        //PXCMPersonTrackingConfiguration.SkeletonJointsConfiguration skeletonConfig = personConfig.QuerySkeletonJoints();
        //skeletonConfig.Enable();

        // Enable the face module: color+depth tracking, appearance-time
        // strategy, at most one tracked face.
        sm.EnableFace();
        PXCMFaceModule faceModule = sm.QueryFace();
        PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();
        faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
        faceConfig.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_APPEARANCE_TIME;
        faceConfig.detection.maxTrackedFaces = 1;
        faceConfig.ApplyChanges();

        // Blob detection: up to four blobs within two meters.
        sm.EnableBlob();
        PXCMBlobModule blobModule = sm.QueryBlob();
        PXCMBlobConfiguration blobConfig = blobModule.CreateActiveConfiguration();
        blobConfig.SetMaxBlobs(4); // 4 is the max
        blobConfig.SetMaxDistance(2000); // in mm's
        blobConfig.ApplyChanges();

        //initialize the SenseManager
        sm.Init();
        faceData = faceModule.CreateOutput();
        blobData = blobModule.CreateOutput();

        // Mirror the image
        sm.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        // Release the configuration/module wrappers (outputs stay alive).
        personConfig.Dispose();
        faceConfig.Dispose();
        faceModule.Dispose();
        blobConfig.Dispose();
        blobModule.Dispose();
    }
    catch (Exception)
    {
        MessageBox.Show("Unable to configure the RealSense camera. Please make sure a R200 camera is connected.", "System Error");
        throw;
    }
}
/// <summary>
/// Sets up the camera (SenseManager + color stream), initializes face
/// detection and allocates the UI elements (face rectangles and expression
/// text blocks). On failure the stack trace and message are shown and the
/// window is closed.
/// </summary>
private void Initialize()
{
    try
    {
        senceManager = PXCMSenseManager.CreateInstance();
        if (senceManager == null)
        {
            throw new Exception("SenseManagerの生成に失敗しました");
        }

        // Enable the color stream and verify the status code.
        var status = senceManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR,
            COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS);
        if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("カラーストリームの取得に失敗しました");
        }

        InitializeFace();

        // One outline rectangle per detectable face.
        rect = new Rectangle[DETECTION_MAXFACES];
        for (var i = 0; i < DETECTION_MAXFACES; i++)
        {
            var r = new Rectangle();
            var transform = new TranslateTransform(COLOR_WIDTH, COLOR_HEIGHT);
            r.Width = 10;
            r.Height = 10;
            r.Stroke = Brushes.Blue;
            r.StrokeThickness = 3;
            r.RenderTransform = transform;
            rect[i] = r;
            CanvasForRect.Children.Add(r);
        }

        // Three expression labels per face.
        tb = new TextBlock[EXPRESSION_MAXFACES, 3];
        for (var i = 0; i < EXPRESSION_MAXFACES; i++)
        {
            for (var j = 0; j < 3; j++)
            {
                var label = new TextBlock();
                label.Width = 200;
                label.Height = 27;
                label.Foreground = new SolidColorBrush(Colors.Red);
                label.FontSize = 20;
                tb[i, j] = label;
                CanvasPoint.Children.Add(label);
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.StackTrace);
        MessageBox.Show("Init:" + ex.Message);
        Close();
    }
}
/// <summary>
/// Constructor: creates the SenseManager, enables the 640x480 depth stream
/// (recording the returned status) and initializes the pipeline.
/// </summary>
public DepthData()
{
    sm = PXCMSenseManager.CreateInstance();

    // Keep the status so callers can check whether the stream came up.
    Status = sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);

    sm.Init();
}
/// <summary>
/// Configures the RealSense pipeline from an existing session: color
/// stream, face module with landmarks/pose and continuous facial
/// recognition backed by a storage database; then initializes the pipeline,
/// applies the device tweaks (mirroring + IVCAM settings) and releases the
/// temporary configuration objects.
/// </summary>
private void ConfigureRealSense(PXCMSession session)
{
    PXCMFaceModule faceModule;
    PXCMFaceConfiguration faceConfig;

    // Start the SenseManager from the supplied session.
    senseManager = session.CreateSenseManager();

    // Enable the color stream.
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);

    // Enable the face module.
    senseManager.EnableFace();
    faceModule = senseManager.QueryFace();
    faceConfig = faceModule.CreateActiveConfiguration();

    // Configure for 3D face tracking (if camera cannot support depth it
    // will revert to 2D tracking).
    faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

    // Enable landmarks and pose detection.
    faceConfig.landmarks.isEnabled = true;
    faceConfig.pose.isEnabled = true;

    // Enable facial recognition with a persistent storage database.
    recogConfig = faceConfig.QueryRecognition();
    recogConfig.Enable();
    PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc =
        new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
    recognitionDesc.maxUsers = MAX_USERS;
    recogConfig.CreateStorage(DB_NAME, out recognitionDesc);
    recogConfig.UseStorage(DB_NAME);
    isDBLoaded = LoadDB();
    recogConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

    // Apply changes and initialize.
    faceConfig.ApplyChanges();
    senseManager.Init();
    faceData = faceModule.CreateOutput();

    // Device tweaks: mirror image + IVCam settings. The repeated
    // QueryCaptureManager().QueryDevice() chain is hoisted into one local.
    PXCMCapture.Device device = senseManager.QueryCaptureManager().QueryDevice();
    device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
    device.SetIVCAMFilterOption(7);
    device.SetIVCAMAccuracy(PXCMCapture.Device.IVCAMAccuracy.IVCAM_ACCURACY_COARSE);
    device.SetIVCAMLaserPower(16);
    device.SetIVCAMMotionRangeTradeOff(100);

    // Release resources. (Fix: in the original the word "resources" had
    // been split out of the trailing comment onto the next line as a bare
    // identifier, which did not compile.)
    faceConfig.Dispose();
    faceModule.Dispose();
}
/// <summary>
/// Window-loaded handler: wires up the DataContext, creates the
/// SenseManager with a 1280x720 color stream plus the face module, then
/// initializes the camera and starts the polling task.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    this.DataContext = this;

    // Pipeline: HD color stream + face analysis.
    SenseManager = PXCMSenseManager.CreateInstance();
    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720);
    SenseManager.EnableFace();

    InitializeCamera();
    ConfigurePollingTask();
}
/// <summary>
/// Configures the RealSense pipeline: color stream plus the face module
/// with expressions and continuous facial recognition backed by a database
/// file; initializes the SenseManager, mirrors the image and releases the
/// temporary configuration objects.
/// </summary>
private void ConfigureRealSense()
{
    PXCMFaceModule faceModule;
    PXCMFaceConfiguration faceConfig;

    // Start the SenseManager and session
    senseManager = PXCMSenseManager.CreateInstance();

    // Enable the color stream
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
    //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 550, 550, 30);

    // Enable the face module
    senseManager.EnableFace();
    //senseManager.EnableHand();
    faceModule = senseManager.QueryFace();
    faceConfig = faceModule.CreateActiveConfiguration();

    // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
    faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

    // Enable all facial expressions.
    expressionConfiguration = faceConfig.QueryExpressions();
    expressionConfiguration.Enable();
    expressionConfiguration.EnableAllExpressions();

    // Enable facial recognition
    recognitionConfig = faceConfig.QueryRecognition();
    recognitionConfig.Enable();

    // Create a recognition database (continuous registration mode).
    PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
    recognitionDesc.maxUsers = DatabaseUsers;
    recognitionConfig.CreateStorage(DatabaseName, out recognitionDesc);
    recognitionConfig.UseStorage(DatabaseName);
    LoadDatabaseFromFile();
    recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

    // Apply changes and initialize
    faceConfig.ApplyChanges();
    senseManager.Init();
    faceData = faceModule.CreateOutput();

    // Mirror image
    senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

    // Release resources
    faceConfig.Dispose();
    faceModule.Dispose();
}
private PXCMImage colorImage = null; // PXCMImage that holds the color frame

/// <summary>
/// Use this for initialization.
/// Unity calls this once, on the frame the script is enabled, before any
/// of the Update methods run. Creates the SenseManager, enables 640x480
/// depth and color streams and initializes the execution pipeline.
/// </summary>
void Start()
{
    psm = PXCMSenseManager.CreateInstance();
    if (psm == null)
    {
        Debug.LogError("SenseManager Initialization Failed");
        return;
    }

    // Depth and color, both at 640x480.
    psm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);
    psm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);

    // Bring the pipeline up; clean up and bail out on failure.
    sts = psm.Init();
    if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Debug.LogError("PXCMSenseManager.Init Failed");
        OnDisable();
        return;
    }
}
private PXCMFaceModule faceAnalyzer; //FaceModule Instance

/// <summary>
/// Use this for initialization
/// Unity function called on the frame when a script is enabled
/// just before any of the Update methods is called the first time.
/// </summary>
void Start()
{
    faceRenderer = gameObject.GetComponent<FaceRenderer>();

    /* Initialize a PXCMSenseManager instance */
    psm = PXCMSenseManager.CreateInstance();
    if (psm == null)
    {
        Debug.LogError("SenseManager Initialization Failed");
        return;
    }

    /* Enable the color stream of size 640x480 */
    psm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);

    /* Enable the face tracking module */
    sts = psm.EnableFace();
    if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Debug.LogError("PXCSenseManager.EnableFace: " + sts);
    }

    /* Retrieve an instance of face to configure */
    faceAnalyzer = psm.QueryFace();
    if (faceAnalyzer == null)
    {
        Debug.LogError("PXCSenseManager.QueryFace");
    }

    /* Initialize the execution pipeline */
    sts = psm.Init();
    if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Debug.LogError("PXCMSenseManager.Init Failed");
        OnDisable();
        return;
    }

    /* Retrieve a PXCMFaceConfiguration instance from a face to enable Gestures and Alerts.
       Note: configuration happens after Init() here, once the module exists. */
    PXCMFaceConfiguration config = faceAnalyzer.CreateActiveConfiguration();
    config.detection.isEnabled = true; // 3D detection is the default tracking mode.
    config.landmarks.isEnabled = true;
    config.pose.isEnabled = true;
    // Track expressions, specifically the open mouth, and raise all alerts.
    config.QueryExpressions().Enable();
    config.QueryExpressions().EnableExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN);
    config.EnableAllAlerts();
    config.ApplyChanges();
    config.Dispose();
}
/// <summary>
/// Click handler: reports the SDK version, captures one synchronous
/// color + depth frame at device-default modes, displays both images and
/// tears the pipeline down, releasing each access on the image it was
/// acquired from.
/// </summary>
private void button1_Click(object sender, RoutedEventArgs e)
{
    // Session + SDK version readout.
    PXCMSession session = PXCMSession.CreateInstance();
    PXCMSession.ImplVersion version = session.QueryVersion();
    textBox1.Text = version.major.ToString() + "." + version.minor.ToString();

    // Pipeline with color and depth at device defaults (0x0).
    PXCMSenseManager sm = session.CreateSenseManager();
    sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 0, 0);
    sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, 0);
    sm.Init();

    // Grab one frame synchronously.
    pxcmStatus status = sm.AcquireFrame(true);
    PXCMCapture.Sample sample = sm.QuerySample();
    PXCMImage image = sample.color;
    PXCMImage dimage = sample.depth;

    // Map both images into CPU-readable bitmaps.
    PXCMImage.ImageData data;
    PXCMImage.ImageData data2;
    image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out data);
    dimage.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_RAW, out data2);
    WriteableBitmap wbm = data.ToWritableBitmap(0, image.info.width, image.info.height, 96.0, 96.0);
    WriteableBitmap wbm2 = data2.ToWritableBitmap(0, dimage.info.width, dimage.info.height, 96.0, 96.0);

    // Show them.
    image1.Source = wbm;
    image2.Source = wbm2;

    // Release accesses on their owning images, then shut down.
    image.ReleaseAccess(data);
    dimage.ReleaseAccess(data2);
    sm.ReleaseFrame();
    sm.Close();
    session.Dispose();
}
/// <summary>
/// Configures the RealSense pipeline: color stream, person tracking (all
/// angles) and face tracking; initializes the SenseManager, mirrors the
/// image and releases the configuration objects. Re-throws after showing a
/// hint when the camera cannot be configured.
/// </summary>
private void ConfigureRealSense()
{
    my.init();
    child.Start();

    try
    {
        // SenseManager + color stream.
        sm = PXCMSenseManager.CreateInstance();
        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, ImageWidth, ImageHeight, 30);

        // Person tracking, watching all angles.
        sm.EnablePersonTracking();
        personModule = sm.QueryPersonTracking();
        PXCMPersonTrackingConfiguration personConfig = personModule.QueryConfiguration();
        personConfig.SetTrackedAngles(PXCMPersonTrackingConfiguration.TrackingAngles.TRACKING_ANGLES_ALL);

        // Face module: color+depth tracking, appearance-time strategy,
        // at most one tracked face.
        sm.EnableFace();
        PXCMFaceModule faceModule = sm.QueryFace();
        PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();
        faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
        faceConfig.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_APPEARANCE_TIME;
        faceConfig.detection.maxTrackedFaces = 1;

        // Apply the configuration and bring the pipeline up.
        faceConfig.ApplyChanges();
        sm.Init();
        faceData = faceModule.CreateOutput();

        // Mirror the image.
        sm.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        // Release the configuration wrappers.
        personConfig.Dispose();
        faceConfig.Dispose();
        faceModule.Dispose();
    }
    catch (Exception)
    {
        // For the sake of brevity we're not doing extensive exception
        // handling here; hint that the camera is missing, then rethrow.
        MessageBox.Show("Unable to configure the RealSense camera. Please make sure a R200 camera is connected.", "System Error");
        throw;
    }
}
/// <summary>
/// Initializes the RealSense pipeline: enables the color stream and user
/// segmentation, initializes the SenseManager, mirrors the image and fetches
/// the segmentation module. On any failure a message box is shown and the
/// window is closed.
/// </summary>
private void Initialize()
{
    try
    {
        // Create the SenseManager.
        senseManager = PXCMSenseManager.CreateInstance();

        // Enable the color stream.
        var sts = senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            // BUG FIX: message previously read "有効化にしました" — the word 失敗 (failed) was missing.
            throw new Exception("カラーストリームの有効化に失敗しました");
        }

        // Enable user segmentation.
        sts = senseManager.Enable3DSeg();
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            // BUG FIX: same truncated message as above.
            throw new Exception("セグメンテーションの有効化に失敗しました");
        }

        // Initialize the pipeline; module objects become valid after Init().
        sts = senseManager.Init();
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("初期化に失敗しました");
        }

        // Mirror the image horizontally.
        senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(
            PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        // Fetch the segmentation module created by Init().
        segmentation = senseManager.Query3DSeg();
        if (segmentation == null)
        {
            throw new Exception("セグメンテーションの取得に失敗しました");
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Initialises the camera view: starts the RealSense color stream with face
/// detection, initialises all registered RSModules, builds the WinForms UI
/// and starts the background updater thread.
/// </summary>
/// <param name="width">Stream and window width in pixels.</param>
/// <param name="height">Stream and window height in pixels.</param>
/// <param name="framterate">Requested color stream frame rate (sic: "framterate").</param>
/// <param name="mods">Modules that get a chance to hook the SenseManager before Init().</param>
public CameraView(int width, int height, int framterate, List<RSModule> mods)
{
    // Turn the camera on: color stream + face detection must be enabled
    // before senseManager.Init() is called.
    senseManager = PXCMSenseManager.CreateInstance();
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, framterate);
    // Enable Face detection
    senseManager.EnableFace();

    // Let every registered module attach itself before the pipeline starts.
    modules = mods;
    modules.ForEach(delegate(RSModule rsm) { rsm.Init(senseManager); });
    senseManager.Init();

    // Separate session used only to query the SDK version for the title bar.
    session = PXCMSession.CreateInstance();
    if (session == null) // Something went wrong, session could not be initialised
    {
        Console.WriteLine("F**k!");
        Application.Exit();
        return;
    }
    iv = session.QueryVersion();
    String versionString = "v" + iv.major + "." + iv.minor;
    Console.WriteLine(versionString);
    Text = versionString;

    // Build the UI: a PictureBox filling the whole form.
    pb = new PictureBox();
    // Set size
    pb.Bounds = new Rectangle(0, 0, width, height);
    // init UI
    this.Bounds = new Rectangle(0, 0, width, height);
    this.Controls.Add(pb);
    FormClosed += new FormClosedEventHandler(Quit);
    this.Show();

    // Start Updater Thread that pulls frames and refreshes the view.
    updaterThread = new Thread(this.update);
    updaterThread.Start();
}
/// <summary>
/// Builds the form, resets all state flags, configures the RealSense hand
/// module (color stream + gesture recognition via a frame callback) and logs
/// the start time.
/// </summary>
public Form1(PXCMSession session)
{
    InitializeComponent();

    thread = new MyBeautifulThread(DoRecognition);
    flagThread = 0;
    LightState = 0;
    flagLight = 0;
    WindState = 0;
    flagWind = 0;
    SendState = 0;

    // Hand tracking and the color stream must be enabled before Init().
    sm = PXCMSenseManager.CreateInstance();
    sm.EnableHand();
    PXCMHandModule hand = sm.QueryHand();
    sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 320, 240, 60);

    // Frames are delivered through the handler callback instead of polling.
    PXCMSenseManager.Handler handler = new PXCMSenseManager.Handler
    {
        onModuleProcessedFrame = OnModuleProcessedFrame
    };
    sm.Init(handler);

    PXCMHandConfiguration handConfiguration = sm.QueryHand().CreateActiveConfiguration();
    // BUG FIX: the original dereferenced handConfiguration first and checked
    // it for null afterwards, so the check could never prevent a crash.
    if (handConfiguration == null)
    {
        Console.WriteLine("Failed Create Configuration");
        Console.WriteLine("That`s all...");
        Console.ReadKey();
    }
    else
    {
        handConfiguration.EnableGesture("wave");
        handConfiguration.EnableGesture("swipe_up");
        handConfiguration.EnableGesture("thumb_up");
        handConfiguration.EnableGesture("tap");
        handConfiguration.ApplyChanges();
    }

    logTextBox.Text = DateTime.Now.ToString("hh:mm:ss") + " Started" + "\n" + logTextBox.Text;
}
/// <summary>
/// Attaches this controller to the RealSense sensor, wires gesture/alert
/// handlers, and streams frames until streaming ends, then closes the sensor.
/// </summary>
public override void Listen()
{
    // attach the controller to the PXCM sensor
    _senseManager = Session.CreateSenseManager();
    _senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
    _senseManager.EnableHand();

    // Hand module, its output buffer and the active configuration.
    _handModule = _senseManager.QueryHand();
    _handData = _handModule.CreateOutput();
    _handConfiguration = _handModule.CreateActiveConfiguration();

    // Register callbacks, then enable the specific alerts/gestures we react to.
    _handConfiguration.SubscribeGesture(_handGestureHandler);
    _handConfiguration.SubscribeAlert(_handAlertHandler);
    _handConfiguration.EnableAlert(PXCMHandData.AlertType.ALERT_HAND_TRACKED);
    _handConfiguration.EnableAlert(PXCMHandData.AlertType.ALERT_HAND_CALIBRATED);
    _handConfiguration.EnableGesture("full_pinch");
    _handConfiguration.EnableGesture("thumb_up");
    _handConfiguration.ApplyChanges();

    _senseManager.Init(_handler);
    sensorActive = true;

    // NOTE(review): StreamFrames(true) appears to be used as a blocking call
    // here, with Close() running after streaming stops — confirm against the
    // SDK docs before reordering anything in this method.
    _senseManager.StreamFrames(true);
    _senseManager.Close();
}
/// <summary>
/// Builds the window and brings up the RealSense pipeline: color stream plus
/// hand tracking with the "wave" gesture and all alerts, then starts the
/// frame-processing worker thread.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    // Pipeline bring-up: modules are enabled first, Init() starts the camera.
    _senseManager = PXCMSenseManager.CreateInstance();
    _senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
    _senseManager.EnableHand();
    _senseManager.Init();

    // Hand-module settings are applied on the active configuration.
    _hand = _senseManager.QueryHand();
    _handConfig = _hand.CreateActiveConfiguration();
    _handConfig.EnableGesture("wave");
    _handConfig.EnableAllAlerts();
    _handConfig.ApplyChanges();

    // Process captured frames off the UI thread.
    _processingThread = new Thread(ProcessingThread);
    _processingThread.Start();
}
/// <summary>
/// Initializes the RealSense pipeline: color stream + user segmentation,
/// mirrors the image and fetches the segmentation module. Shows a message box
/// and closes the window on failure.
/// </summary>
private void Initialize()
{
    try
    {
        // Create the SenseManager.
        senseManager = PXCMSenseManager.CreateInstance();

        // Enable the color stream.
        var sts = senseManager.EnableStream(
            PXCMCapture.StreamType.STREAM_TYPE_COLOR, COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS );
        if ( sts < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
            // BUG FIX: message previously read "有効化にしました" — the word 失敗 (failed) was missing.
            throw new Exception( "カラーストリームの有効化に失敗しました" );
        }

        // Enable user segmentation.
        sts = senseManager.Enable3DSeg();
        if ( sts < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
            // BUG FIX: same truncated message as above.
            throw new Exception( "セグメンテーションの有効化に失敗しました" );
        }

        // Initialize the pipeline; modules become valid after Init().
        sts = senseManager.Init();
        if ( sts < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
            throw new Exception( "初期化に失敗しました" );
        }

        // Mirror the image horizontally.
        senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(
            PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL );

        // Fetch the segmentation module created by Init().
        segmentation = senseManager.Query3DSeg();
        if ( segmentation == null ) {
            throw new Exception( "セグメンテーションの取得に失敗しました" );
        }
    }
    catch ( Exception ex ) {
        MessageBox.Show( ex.Message );
        Close();
    }
}
/// <summary>
/// Brings up the RealSense pipeline with a depth stream only and turns on
/// mirrored output. Throws on any SDK failure.
/// </summary>
private void Initialize()
{
    // The SenseManager owns the whole processing pipeline.
    senseManager = PXCMSenseManager.CreateInstance();

    // Request the depth stream at the configured resolution / frame rate.
    var status = senseManager.EnableStream(
        PXCMCapture.StreamType.STREAM_TYPE_DEPTH,
        DEPTH_WIDTH, DEPTH_HEIGHT, DEPTH_FPS );
    if ( status < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
        throw new Exception( "Depthストリームの有効化に失敗しました" );
    }

    // Start the pipeline; the device becomes available after Init().
    status = senseManager.Init();
    if ( status < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
        throw new Exception( "初期化に失敗しました" );
    }

    // Flip the image horizontally so it behaves like a mirror.
    var device = senseManager.QueryCaptureManager().QueryDevice();
    device.SetMirrorMode( PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL );
}
/// <summary>
/// Brings up the RealSense pipeline with depth streaming and hand tracking,
/// mirrors the output, and runs the hand-tracking initialization. Any failure
/// is reported in a message box and closes the window.
/// </summary>
private void Initialize()
{
    try {
        // The SenseManager owns the whole processing pipeline.
        senseManager = PXCMSenseManager.CreateInstance();

        // Request the depth stream at the configured resolution / frame rate.
        var status = senseManager.EnableStream(
            PXCMCapture.StreamType.STREAM_TYPE_DEPTH,
            DEPTH_WIDTH, DEPTH_HEIGHT, DEPTH_FPS );
        if ( status < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
            throw new Exception( "Depthストリームの有効化に失敗しました" );
        }

        // Turn on the hand-tracking module.
        status = senseManager.EnableHand();
        if ( status < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
            throw new Exception( "手の検出の有効化に失敗しました" );
        }

        // Start the pipeline; modules become available after Init().
        status = senseManager.Init();
        if ( status < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
            throw new Exception( "初期化に失敗しました" );
        }

        // Flip the image horizontally so it behaves like a mirror.
        var device = senseManager.QueryCaptureManager().QueryDevice();
        device.SetMirrorMode( PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL );

        // Hand-module specific setup (configuration, outputs, ...).
        InitializeHandTracking();
    }
    catch ( Exception ex ) {
        MessageBox.Show( ex.Message );
        Close();
    }
}
/// <summary>
/// Creates the face-tracking pipeline: 640x480@60 color stream plus the face
/// module with detection, all expressions, gaze and all alerts enabled.
/// </summary>
public FaceTrackerThread()
{
    running = true;

    senseManager = PXCMSenseManager.CreateInstance();
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60);
    senseManager.EnableFace();
    senseManager.Init();

    // The active configuration is only valid after Init().
    face = senseManager.QueryFace();
    faceConfiguration = face.CreateActiveConfiguration();
    faceConfiguration.detection.isEnabled = true;

    // Expression detection: enable the module and every expression it supports.
    expressionConfiguration = faceConfiguration.QueryExpressions();
    expressionConfiguration.Enable();
    expressionConfiguration.EnableAllExpressions();

    // Gaze detection
    gazec = faceConfiguration.QueryGaze();
    gazec.isEnabled = true;

    faceConfiguration.EnableAllAlerts();
    // FIX: the original called ApplyChanges() twice (once before the alerts
    // were enabled); a single call after all settings is sufficient.
    faceConfiguration.ApplyChanges();
}
/// <summary>
/// Creates the SenseManager, lists the available devices, enables a
/// default-resolution depth stream, initializes the pipeline and reads back
/// the device information. Throws on any failure.
/// </summary>
private void Initialize()
{
    // The SenseManager owns the whole processing pipeline.
    senseManager = PXCMSenseManager.CreateInstance();
    if ( senseManager == null ) {
        throw new Exception( "SenseManagerを生成できませんでした。" );
    }

    // Populate the device list shown to the user.
    PopulateDevice();

    // 0/0/0 lets the SDK choose resolution and frame rate for the depth stream.
    var status = senseManager.EnableStream(
        PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, 0, 0 );
    if ( status < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
        throw new Exception( "Depthストリームの有効化に失敗しました" );
    }

    // Start the pipeline; the device becomes available after Init().
    status = senseManager.Init();
    if ( status < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
        throw new Exception( "初期化に失敗しました" );
    }

    // Read back details of the device that was actually opened.
    GetDeviceInfo();
}
/// <summary>
/// Unity enable hook: builds the whole RealSense toolkit pipeline according to
/// the currently-set SenseOptions — playback/record mode, per-modality enable,
/// SenseManager.Init(), then per-module configuration (face, hand, object,
/// speech) and the projection/blob-extractor helpers. Sets Initialized when
/// everything succeeded.
/// </summary>
void OnEnable()
{
    Initialized = false;

    /* Create a SenseManager instance */
    SenseManager = PXCMSenseManager.CreateInstance();
    if (SenseManager == null)
    {
        print("Unable to create the pipeline instance");
        return;
    }

    // Having speech commands registered implies the speech option.
    if (_speechCommandsRef.Count != 0)
    {
        SetSenseOption(SenseOption.SenseOptionID.Speech);
    }

    int numberOfEnabledModalities = 0;

    //Set mode according to RunMode - play from file / record / live stream
    if (RunMode == MCTTypes.RunModes.PlayFromFile)
    {
        //CHECK IF FILE EXISTS
        if (!System.IO.File.Exists(FilePath))
        {
            Debug.LogWarning("No Filepath Set Or File Doesn't Exist, Run Mode Will Be Changed to Live Stream");
            RunMode = MCTTypes.RunModes.LiveStream;
        }
        else
        {
            PXCMCaptureManager cManager = SenseManager.QueryCaptureManager();
            cManager.SetFileName(FilePath, false);
            Debug.Log("SenseToolkitManager: Playing from file: " + FilePath);
        }
    }
    if (RunMode == MCTTypes.RunModes.RecordToFile)
    {
        //CHECK IF PATH: strip the file name to get the directory portion.
        string PathOnly = FilePath;
        while (!PathOnly[PathOnly.Length - 1].Equals('\\'))
        {
            PathOnly = PathOnly.Remove(PathOnly.Length - 1, 1);
        }
        if (!System.IO.Directory.Exists(PathOnly))
        {
            Debug.LogWarning("No Filepath Set Or Path Doesn't Exist, Run Mode Will Be Changed to Live Stream");
            RunMode = MCTTypes.RunModes.LiveStream;
        }
        else
        {
            PXCMCaptureManager cManager = SenseManager.QueryCaptureManager();
            cManager.SetFileName(FilePath, true);
            Debug.Log("SenseToolkitManager: Recording to file: " + FilePath);
        }
    }

    /* Enable modalities according to the set options*/
    if (IsSenseOptionSet(SenseOption.SenseOptionID.Face, true))
    {
        SenseManager.EnableFace();
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Face).Initialized = true;
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Face).Enabled = true;
        // Face tracking also needs the color stream.
        SetSenseOption(SenseOption.SenseOptionID.VideoColorStream);
        numberOfEnabledModalities++;
    }
    if (IsSenseOptionSet(SenseOption.SenseOptionID.Hand, true))
    {
        _sts = SenseManager.EnableHand();
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Hand).Initialized = true;
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Hand).Enabled = true;
        numberOfEnabledModalities++;
    }
    if (IsSenseOptionSet(SenseOption.SenseOptionID.Object, true))
    {
        _sts = SenseManager.EnableTracker();
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Object).Initialized = true;
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Object).Enabled = true;
        numberOfEnabledModalities++;
    }
    if (IsSenseOptionSet(SenseOption.SenseOptionID.Speech, true))
    {
        // Speech is initialized at most once; failure unsets the option.
        if (!SpeechManager.IsInitialized)
        {
            if (SpeechManager.InitalizeSpeech())
            {
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Speech).Initialized = true;
                _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Speech).Enabled = true;
                numberOfEnabledModalities++;
            }
            else
            {
                UnsetSenseOption(SenseOption.SenseOptionID.Speech);
            }
        }
        else
        {
            _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Speech).Initialized = true;
            _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Speech).Enabled = true;
            numberOfEnabledModalities++;
        }
    }
    // Depth stream serves both the raw depth view and the point cloud.
    if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoDepthStream, true) ||
        IsSenseOptionSet(SenseOption.SenseOptionID.PointCloud, true))
    {
        SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, 0, 0);
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoDepthStream).Initialized = true;
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoDepthStream).Enabled = true;
        numberOfEnabledModalities++;
    }
    if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoIRStream, true))
    {
        SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, 0, 0, 0);
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoIRStream).Initialized = true;
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoIRStream).Enabled = true;
        numberOfEnabledModalities++;
    }
    if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoColorStream, true))
    {
        // Pick the color resolution from the configured quality tier.
        if (ColorImageQuality == MCTTypes.RGBQuality.FullHD)
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080, 0);
        }
        else if (ColorImageQuality == MCTTypes.RGBQuality.HD)
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720, 0);
        }
        else if (ColorImageQuality == MCTTypes.RGBQuality.HalfHD)
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 960, 540, 0);
        }
        else
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 0);
        }
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoColorStream).Initialized = true;
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoColorStream).Enabled = true;
        numberOfEnabledModalities++;
    }
    if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoSegmentation, true))
    {
        // Segmentation needs its own color stream request plus Enable3DSeg().
        if (ColorImageQuality == MCTTypes.RGBQuality.FullHD)
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080, 0);
        }
        else if (ColorImageQuality == MCTTypes.RGBQuality.HD)
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720, 0);
        }
        else if (ColorImageQuality == MCTTypes.RGBQuality.HalfHD)
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 960, 540, 0);
        }
        else
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 0);
        }
        SenseManager.Enable3DSeg();
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoSegmentation).Initialized = true;
        _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.VideoSegmentation).Enabled = true;
        numberOfEnabledModalities++;
    }

    /* Initialize the execution */
    _sts = SenseManager.Init();
    if (_sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        if (numberOfEnabledModalities > 0)
        {
            Debug.LogError("Unable to initialize all modalities");
        }
        return;
    }

    //Set different configurations:
    // Face
    if (IsSenseOptionSet(SenseOption.SenseOptionID.Face, true))
    {
        var faceModule = SenseManager.QueryFace();
        var faceConfiguration = faceModule.CreateActiveConfiguration();
        if (faceConfiguration == null)
            throw new UnityException("CreateActiveConfiguration returned null");
        faceConfiguration.Update();
        // Detection, landmarks and pose: enabled with smoothing off.
        faceConfiguration.detection.isEnabled = true;
        faceConfiguration.detection.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;
        faceConfiguration.landmarks.isEnabled = true;
        faceConfiguration.landmarks.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;
        faceConfiguration.pose.isEnabled = true;
        faceConfiguration.pose.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;
        faceConfiguration.DisableAllAlerts();
        faceConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_APPEARANCE_TIME;
        // Clamp the requested face count into the SDK-supported 1..15 range.
        if ((NumberOfDetectedFaces < 1) || (NumberOfDetectedFaces > 15))
        {
            Debug.Log("Ilegal value for Number Of Detected Faces, value is set to 1");
            NumberOfDetectedFaces = 1;
        }
        faceConfiguration.detection.maxTrackedFaces = NumberOfDetectedFaces;
        faceConfiguration.landmarks.maxTrackedFaces = NumberOfDetectedFaces;
        faceConfiguration.pose.maxTrackedFaces = NumberOfDetectedFaces;
        PXCMFaceConfiguration.ExpressionsConfiguration expressionConfig = faceConfiguration.QueryExpressions();
        expressionConfig.Enable();
        expressionConfig.EnableAllExpressions();
        faceConfiguration.ApplyChanges();
        faceConfiguration.Dispose();
        FaceModuleOutput = faceModule.CreateOutput();
        UnsetSenseOption(SenseOption.SenseOptionID.VideoColorStream);
    }
    // Hand
    if (IsSenseOptionSet(SenseOption.SenseOptionID.Hand, true))
    {
        PXCMHandModule handAnalysis = SenseManager.QueryHand();
        PXCMHandConfiguration handConfiguration = handAnalysis.CreateActiveConfiguration();
        if (handConfiguration == null)
            throw new UnityException("CreateActiveConfiguration returned null");
        handConfiguration.Update();
        handConfiguration.EnableAllGestures();
        handConfiguration.EnableStabilizer(true);
        handConfiguration.DisableAllAlerts();
        handConfiguration.EnableSegmentationImage(false);
        handConfiguration.ApplyChanges();
        handConfiguration.Dispose();
        HandDataOutput = handAnalysis.CreateOutput();
    }
    // Object tracking: if the option was set but not yet enabled, restart the
    // whole component so the tracker gets enabled before Init().
    if (IsSenseOptionSet(SenseOption.SenseOptionID.Object, true))
    {
        if (_senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Object).Enabled != true)
        {
            _senseOptions.Find( i => i.ID == SenseOption.SenseOptionID.Object).Enabled = true;
            OnDisable();
            OnEnable();
        }
    }
    if (IsSenseOptionSet(SenseOption.SenseOptionID.Speech, true))
    {
        UpdateSpeechCommands();
        SpeechManager.Start();
    }

    // Create an instance for the projection & blob extractor
    if (Projection == null)
    {
        Projection = SenseManager.QueryCaptureManager().QueryDevice().CreateProjection();
    }
    if (BlobExtractor == null)
    {
        SenseManager.session.CreateImpl<PXCMBlobExtractor>(out BlobExtractor);
    }

    // Set initialization flag
    Initialized = true;
}
/// <summary>
/// Builds the capture window: opens the named-pipe client, prepares the
/// data/video output folders and the CSV schema row, resets all tracking
/// state, then brings up the RealSense pipeline (color/depth/IR + hand, face
/// and emotion modules, recorded to a .raw file) and starts the processing
/// worker thread.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    //set the current date and time (used to name the CSV and raw video files)
    currentDateTime = DateTime.Now.ToString("yyyyMMddHHmmssfff");

    //set total timer count to 0 and init vars
    highPerformanceTimer = new HiPerfTimer();
    totalHighPerfTimeElapsed = 0;
    numLinesWritten = 0; //set the total number of lines written to 0 so we can track when to start the timer

    //init pipe stuff
    pipeClient = new MyClient(PIPE_NAME);
    pipeClient.SendMessage("I Am Intel RealSense");
    //Debug.WriteLine("Server Ready");

    //initialise combobox
    populateComboBox();
    //init the exprToDisplay global var
    exprToDisplay = "";

    //Work on the file
    //create the "data" and "video" output directories (absolute paths)
    string dirToCreate = "data";
    string dirToCreateFull = System.IO.Path.GetFullPath(dirToCreate);
    Directory.CreateDirectory(dirToCreateFull);
    dirToCreate = "video";
    dirToCreateFull = System.IO.Path.GetFullPath(dirToCreate);
    Directory.CreateDirectory(dirToCreateFull);

    //create the csv file to write to
    file = new StreamWriter("data/" + currentDateTime + "data" + ".csv");

    //initialise global expressions array - faster to add the keys here?
    var enumListMain = Enum.GetNames(typeof(PXCMFaceData.ExpressionsData.FaceExpression));
    exprTable = new Hashtable();
    string initLine = "";
    //Add the column schema
    //Initial line: timestamp and high prec time
    initLine += "TIMESTAMP,HIGH_PRECISION_TIME_FROM_START,STIMCODE";
    //add all the expression data columns (one per SDK expression enum value)
    for (int i = 0; i < enumListMain.Length; i++)
    {
        exprTable.Add(enumListMain[i], 0);
        initLine += "," + enumListMain[i];
    }
    //add the bounding rectangle column
    initLine += "," + "BOUNDING_RECTANGLE_HEIGHT" + "," + "BOUNDING_RECTANGLE_WIDTH" + "," + "BOUNDING_RECTANGLE_X" + "," + "BOUNDING_RECTANGLE_Y";
    //add the average depth column
    initLine += "," + "AVERAGE_DEPTH";
    //add landmark points column (X and Y per landmark)
    for (int i = 0; i < LANDMARK_POINTS_TOTAL; i++)
    {
        initLine += "," + "LANDMARK_" + i + "_X";
        initLine += "," + "LANDMARK_" + i + "_Y";
    }
    //add euler angles columns
    initLine += "," + "EULER_ANGLE_PITCH" + "," + "EULER_ANGLE_ROLL" + "," + "EULER_ANGLE_YAW";
    initLine += "," + "QUATERNION_W" + "," + "QUATERNION_X" + "," + "QUATERNION_Y" + "," + "QUATERNION_Z";
    //write the initial row to the file
    file.WriteLine(initLine);

    //configure the camera mode selection box
    cbCameraMode.Items.Add("Color");
    cbCameraMode.Items.Add("IR");
    cbCameraMode.Items.Add("Depth");
    //configure initial camera mode
    cameraMode = "Color";

    //initialise global vars (hand/eye/blink trigger state machines)
    numFacesDetected = 0;
    handWaving = false;
    handTrigger = false;
    handResetTimer = 0;
    lEyeClosedIntensity = 0;
    lEyeClosed = false;
    lEyeClosedTrigger = false;
    lEyeClosedResetTimer = 0;
    rEyeClosed = false;
    rEyeClosedTrigger = false;
    rEyeClosedResetTimer = 0;
    rEyeClosedIntensity = 0;
    emotionEvidence = 0;
    blinkTrigger = false;
    blinkResetTimer = 0;
    //global fps vars
    prevTime = 0;
    stopwatch = new Stopwatch();

    // Instantiate and initialize the SenseManager
    senseManager = PXCMSenseManager.CreateInstance();
    if (senseManager == null)
    {
        MessageBox.Show("Cannot initialise sense manager: closing in 20s, report to Sriram");
        Thread.Sleep(20000);
        Environment.Exit(1);
    }

    //capture samples: record the raw streams alongside the CSV data
    senseManager.captureManager.SetFileName("video/" + currentDateTime + ".raw", true);
    //Enable color, depth and IR streams at the shared resolution/fps
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, STREAM_WIDTH, STREAM_HEIGHT, STREAM_FPS);
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, STREAM_WIDTH, STREAM_HEIGHT, STREAM_FPS);
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, STREAM_WIDTH, STREAM_HEIGHT, STREAM_FPS);

    //Enable face and hand tracking AND EMOTION TRACKING
    senseManager.EnableHand();
    senseManager.EnableFace();
    senseManager.EnableEmotion();

    //Initialise the senseManager - begin collecting data
    senseManager.Init();

    // Configure the Hand Module ("wave" gesture + all alerts)
    hand = senseManager.QueryHand();
    handConfig = hand.CreateActiveConfiguration();
    handConfig.EnableGesture("wave");
    handConfig.EnableAllAlerts();
    handConfig.ApplyChanges();

    //Configure the Face Module
    face = senseManager.QueryFace();
    faceConfig = face.CreateActiveConfiguration();
    faceConfig.EnableAllAlerts();
    faceConfig.detection.isEnabled = true; //enables querydetection function to retrieve face loc data
    faceConfig.detection.maxTrackedFaces = 1; //MAXIMUM TRACKING - 1 FACE
    faceConfig.ApplyChanges();

    //Configure the sub-face-module Expressions
    exprConfig = faceConfig.QueryExpressions();
    exprConfig.Enable();
    exprConfig.EnableAllExpressions();
    faceConfig.ApplyChanges();

    // Start the worker thread that processes the captured data in real-time
    processingThread = new Thread(new ThreadStart(ProcessingThread));
    processingThread.Start();
}
/// <summary>
/// Creates the RealSense session and SenseManager, enables user segmentation
/// and the color stream, initializes the pipeline and mirrors the output.
/// </summary>
/// <returns>true when the camera was configured successfully; false otherwise.</returns>
private bool ConfigureRealSense()
{
    try
    {
        session = PXCMSession.CreateInstance();
        senseManager = session.CreateSenseManager();

        // Enable user segmentation and the color stream, then start the pipeline.
        senseManager.Enable3DSeg();
        senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, WIDTH, HEIGHT, 30);
        senseManager.Init();

        // Flip the image horizontally so it behaves like a mirror.
        var device = senseManager.QueryCaptureManager().QueryDevice();
        device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        return true;
    }
    catch (Exception)
    {
        // Any SDK failure (missing camera, unsupported stream, ...) maps to false.
        return false;
    }
}
/// <summary>
/// Starts this instance: configures the optional face (with pose and,
/// optionally, expressions), emotion and streaming modules, then begins
/// synchronous frame streaming with a per-frame callback.
/// </summary>
/// <exception cref="ResearchException">Thrown when the camera is already started.</exception>
public void Start()
{
    if (_sm != null)
        throw new ResearchException("Camera is already started.");

    _sm = PXCMSenseManager.CreateInstance();

    // Configure face detection.
    if (EnableFace)
    {
        _sm.EnableFace();
        var faceModule = _sm.QueryFace();
        using (PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration())
        {
            faceConfig.EnableAllAlerts();
            faceConfig.pose.isEnabled = true;
            faceConfig.pose.maxTrackedFaces = 4;
            if (EnableExpression)
            {
                PXCMFaceConfiguration.ExpressionsConfiguration expression = faceConfig.QueryExpressions();
                expression.Enable();
                expression.EnableAllExpressions();
            }
            // BUG FIX: ApplyChanges() was only called when EnableExpression was
            // true, so the alert/pose settings above were silently discarded
            // whenever expressions were disabled.
            faceConfig.ApplyChanges();
        }
    }

    if (EnableEmotion)
    {
        // Configure emotion detection.
        _sm.EnableEmotion();
    }

    if (EnableStreaming)
    {
        // Configure streaming.
        _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);
        // _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);
        // _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, 640, 480);
    }

    // Event handler for data callbacks.
    var handler = new PXCMSenseManager.Handler
    {
        onModuleProcessedFrame = OnModuleProcessedFrame
    };
    _sm.Init(handler);

    // GO.
    Debug.WriteLine("{0} Starting streaming.", Time());
    _sm.StreamFrames(false);
    //Debug.WriteLine("{0} End streaming.", Time());
}
/// <summary>
/// Configures the RealSense pipeline for facial recognition: color stream,
/// face module in color+depth mode with landmarks, a recognition database in
/// continuous-registration mode, then initializes the SenseManager and
/// mirrors the image.
/// </summary>
private void ConfigureRealSense()
{
    PXCMFaceModule faceModule;
    PXCMFaceConfiguration faceConfig;

    // Start the SenseManager and session
    senseManager = PXCMSenseManager.CreateInstance();
    captureManager = senseManager.captureManager;

    // Enable the color stream
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60);
    //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480, 0);

    // Enable the face module
    senseManager.EnableFace();
    faceModule = senseManager.QueryFace();
    faceConfig = faceModule.CreateActiveConfiguration();

    // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
    faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

    // Enable facial recognition
    recognitionConfig = faceConfig.QueryRecognition();
    recognitionConfig.Enable();

    //Enable Landmark Detection
    faceConfig.landmarks.isEnabled = true;

    // Create a recognition database sized for the configured user count.
    PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
    recognitionDesc.maxUsers = DatabaseUsers;
    //recognitionConfig.CreateStorage(DatabaseName, out recognitionDesc);
    //recognitionConfig.UseStorage(DatabaseName);
    // Database contents are loaded from disk instead of SDK storage.
    LoadDatabaseFromFile();
    recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

    // Apply changes and initialize
    faceConfig.ApplyChanges();
    senseManager.Init();
    faceData = faceModule.CreateOutput();

    // Mirror image
    senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

    // Release configuration/module handles; faceData stays alive for tracking.
    faceConfig.Dispose();
    faceModule.Dispose();
}
/// <summary>
/// Brings up the RealSense pipeline with a color stream and the object
/// tracker, mirrors the output and runs the tracker-specific initialization.
/// Any failure is reported in a message box and closes the window.
/// </summary>
private void Initialize()
{
    try {
        // The SenseManager owns the whole processing pipeline.
        senseManager = PXCMSenseManager.CreateInstance();

        // Request the color stream at the configured resolution / frame rate.
        var status = senseManager.EnableStream(
            PXCMCapture.StreamType.STREAM_TYPE_COLOR,
            COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS );
        if ( status < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
            throw new Exception( "カラーストリームの有効化に失敗しました" );
        }

        // Turn on the object-tracking module.
        status = senseManager.EnableTracker();
        if ( status < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
            throw new Exception( "オブジェクトトラッカーの有効化に失敗しました" );
        }

        // Start the pipeline; modules become available after Init().
        status = senseManager.Init();
        if ( status < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
            throw new Exception( "初期化に失敗しました" );
        }

        // Flip the image horizontally so it behaves like a mirror.
        var device = senseManager.QueryCaptureManager().QueryDevice();
        device.SetMirrorMode( PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL );

        // Tracker-specific setup (targets, outputs, ...).
        InitializeObjectTracking();
    }
    catch ( Exception ex ) {
        MessageBox.Show( ex.Message );
        Close();
    }
}