/// <summary>
/// Handles the Start button click: reads the target IP/port from the text
/// boxes, creates and configures the RealSense pipeline (640x480 color at
/// 60 fps plus face tracking with expressions and 78 landmarks), then
/// launches the capture thread.
/// </summary>
private void startButton_Click(object sender, RoutedEventArgs e)
{
    CurrentIpAdress = ipTextBox.Text;
    currentPort = portTextBox.Text;

    senseManager = PXCMSenseManager.CreateInstance();
    if (senseManager == null)
    {
        throw new Exception("Failed to create the SenseManager.");
    }

    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 60);
    senseManager.EnableFace();

    // BUG FIX: the Init() status was previously ignored; a failed Init would
    // surface later as a NullReferenceException from QueryFace().
    if (senseManager.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("Failed to initialize the SenseManager pipeline.");
    }

    faceModule = senseManager.QueryFace();
    faceConfiguration = faceModule.CreateActiveConfiguration();
    faceConfiguration.detection.isEnabled = true;

    expressionConfiguration = faceConfiguration.QueryExpressions();
    expressionConfiguration.Enable();
    expressionConfiguration.EnableAllExpressions();

    faceConfiguration.landmarks.isEnabled = true;
    faceConfiguration.landmarks.numLandmarks = 78;
    faceConfiguration.EnableAllAlerts();
    faceConfiguration.ApplyChanges();

    captureProcess = new Thread(new ThreadStart(CaptureProcess));
    captureProcess.Start();
}
/// <summary>
/// Entry point: creates a SenseManager, initializes the pipeline, and
/// enumerates the available capture devices. Errors are printed to the
/// console.
/// </summary>
static void Main(string[] args)
{
    try
    {
        // Create the SenseManager.
        senseManager = PXCMSenseManager.CreateInstance();
        if (senseManager == null)
        {
            throw new Exception("SenseManagerの生成に失敗しました");
        }

        // Init() fails unless at least one module is enabled, so enable one.
        senseManager.EnableFace();

        // Initialize the pipeline.
        if (senseManager.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("パイプラインの初期化に失敗しました");
        }

        // List the devices that can be used.
        enumDevice();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
}
/// <summary>
/// Constructs the model: seeds the emotion score table, configures the
/// camera (1920x1080 color stream at 30 fps), enables depth-assisted face
/// tracking with detection and pose, and prepares the module list.
/// </summary>
public Model()
{
    // Every tracked emotion starts at zero.
    foreach (var emotion in new[] { "Anger", "Fear", "Disgust", "Surprise", "Joy", "Sadness", "Contempt" })
    {
        emotions[emotion] = 0;
    }

    width = 1920;
    height = 1080;
    framerate = 30;

    senseManager = PXCMSenseManager.CreateInstance();
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, framerate);

    // Turn on face detection before initializing the pipeline.
    senseManager.EnableFace();
    senseManager.Init();

    // Configure the face module after Init (module instances exist then).
    face = senseManager.QueryFace();
    faceConfig = face.CreateActiveConfiguration();
    faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
    faceConfig.detection.isEnabled = true;
    faceConfig.pose.isEnabled = true;
    faceConfig.ApplyChanges();
    faceConfig.Update();

    modules = new List<RSModule>();
}
/// <summary>
/// Creates a frame reader backed either by DSAPI (when compiled with the
/// DSAPI symbol) or by a RealSense SDK session and pipeline. Throws
/// ScannerNotFoundException when no sensor/pipeline can be created.
/// </summary>
public FrameReader(ObjectPool <MemoryFrame> pool, SmithersLogger logger)
{
#if DSAPI
    _dsAPI = new DSAPIManaged();
    _dsAPI.initializeDevice();
#else
    _pool = pool;
    _logger = logger;

    // TODO Inject this instead of creating it
    _session = PXCMSession.CreateInstance();
    if (_session == null)
    {
        throw new Smithers.Reading.FrameData.ScannerNotFoundException("No valid plugged-in DS4 sensor found.");
    }

    _senseManager = PXCMSenseManager.CreateInstance();
    if (_senseManager == null)
    {
        throw new Smithers.Reading.FrameData.ScannerNotFoundException("Failed to create an SDK pipeline object");
    }

    _session.SetCoordinateSystem(PXCMSession.CoordinateSystem.COORDINATE_SYSTEM_REAR_OPENCV);
#endif

    // Default capture flags.
    this.Synced = true;
    this.Mirrored = false;
    this.Record = false;
    this.Playback = false;
    this.RealTime = true;
}
/// <summary>
/// Window constructor: resets the gesture state, sets up the RealSense
/// pipeline (640x480 color at 30 fps plus hand tracking with the "v_sign"
/// gesture and all alerts), and spawns the frame-processing thread.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    handWaving = false;
    handTrigger = false;
    msgTimer = 0;

    // Instantiate and initialize the SenseManager.
    senseManager = PXCMSenseManager.CreateInstance();
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
    senseManager.EnableHand();
    senseManager.Init();

    // Configure the hand module.
    hand = senseManager.QueryHand();
    handConfig = hand.CreateActiveConfiguration();
    handConfig.EnableGesture("v_sign");
    handConfig.EnableAllAlerts();
    handConfig.ApplyChanges();

    // Start the worker thread.
    processingThread = new Thread(ProcessingThread);
    processingThread.Start();
}
/// <summary>
/// Creates the SenseManager and session, enables hand-cursor tracking,
/// applies the cursor configuration (engagement, all gestures, all
/// alerts), and initializes the pipeline.
/// </summary>
public void ConfigureRealSense()
{
    // Create an instance of the SenseManager.
    sm = PXCMSenseManager.CreateInstance();
    if (sm == null)
    {
        throw new InvalidOperationException("Failed to create the SenseManager.");
    }

    // Enable cursor tracking.
    sm.EnableHandCursor();

    // Create a session of the RealSense.
    session = PXCMSession.CreateInstance();

    // Get an instance of the hand cursor module.
    cursorModule = sm.QueryHandCursor();
    if (cursorModule == null)
    {
        throw new InvalidOperationException("Hand cursor module is unavailable.");
    }

    // Make configuration changes and apply them.
    cursorConfig = cursorModule.CreateActiveConfiguration();
    cursorConfig.EnableEngagement(true);
    cursorConfig.EnableAllGestures();
    cursorConfig.EnableAllAlerts();
    cursorConfig.ApplyChanges();

    // BUG FIX: the Init() status was previously discarded, hiding pipeline
    // initialization failures from the caller.
    if (sm.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new InvalidOperationException("Failed to initialize the SenseManager pipeline.");
    }
}
/// <summary>
/// Window constructor: configures the RealSense hand module for the
/// "thumb_up"/"thumb_down" gestures and, when pipeline initialization
/// succeeds, starts the long-running input-processing task with a
/// cancellation token.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    _senseManager = PXCMSenseManager.CreateInstance();
    _senseManager.EnableHand();

    var handManager = _senseManager.QueryHand();
    _handConfig = handManager.CreateActiveConfiguration();
    _handConfig.EnableGesture("thumb_up");
    _handConfig.EnableGesture("thumb_down");
    _handConfig.EnableAllAlerts();
    _handConfig.ApplyChanges();

    var status = _senseManager.Init();
    if (status >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        _cancellationTokenSource = new CancellationTokenSource();
        CancellationToken token = _cancellationTokenSource.Token;

        // BUG FIX: the original called StartNew(Action<object>, object state,
        // CancellationToken) and passed TaskCreationOptions.LongRunning as the
        // *state* argument, so the LongRunning hint was never applied. Use the
        // overload that actually accepts TaskCreationOptions.
        _task = Task.Factory.StartNew(
            () => ProcessInput(token),
            token,
            TaskCreationOptions.LongRunning,
            TaskScheduler.Default);
    }
}
/// <summary>
/// Sets up the RealSense pipeline: enables the color stream, initializes
/// the pipeline, switches the device to mirrored output, and starts the
/// speech-recognition subsystem. Shows the error and closes on failure.
/// </summary>
private void Initialize()
{
    try
    {
        // Create the SenseManager.
        senseManager = PXCMSenseManager.CreateInstance();

        // Enable the color stream.
        senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR,
            COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS);

        // Initialize the pipeline.
        pxcmStatus ret = senseManager.Init();
        if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("初期化に失敗しました");
        }

        // Mirror the displayed image.
        senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(
            PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        // Initialize speech recognition.
        InitializeSpeechRecognition();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Initializes the depth stream (320x240 at 30 fps). When no camera is
/// available, prompts the user for a recorded .rssdk clip and retries
/// initialization in playback mode. Throws when initialization fails.
/// </summary>
private void InitVideoStream()
{
    senseManager = PXCMSenseManager.CreateInstance();
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 320, 240, 30);

    pxcmStatus initStatus = senseManager.Init();
    if (initStatus == pxcmStatus.PXCM_STATUS_ITEM_UNAVAILABLE)
    {
        // No camera available — fall back to playback from a file.
        OpenFileDialog dialog = new OpenFileDialog
        {
            Filter = "RSSDK clip|*.rssdk|All files|*.*",
            CheckFileExists = true,
            CheckPathExists = true
        };

        if (dialog.ShowDialog() == true)
        {
            senseManager.captureManager.SetFileName(dialog.FileName, false);
            initStatus = senseManager.Init();
        }
    }

    if (initStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception(String.Format("Init failed: {0}", initStatus));
    }
}
/// <summary>
/// Creates the SenseManager, enables the depth stream, initializes the
/// pipeline, and turns on horizontal mirroring. Throws on any failure.
/// </summary>
private void Initialize()
{
    // Create the SenseManager.
    senseManager = PXCMSenseManager.CreateInstance();

    // Enable the depth stream.
    var status = senseManager.EnableStream(
        PXCMCapture.StreamType.STREAM_TYPE_DEPTH,
        DEPTH_WIDTH, DEPTH_HEIGHT, DEPTH_FPS);
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("Depthストリームの有効化に失敗しました");
    }

    // Initialize the pipeline.
    status = senseManager.Init();
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("初期化に失敗しました");
    }

    // Mirror the displayed image.
    senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(
        PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
}
/// <summary>
/// Unity initialization: creates the SenseManager, enables hand tracking,
/// subscribes to alerts, turns on normalized joints, and initializes the
/// pipeline (disabling the component when Init fails).
/// </summary>
void Start()
{
    rotationType = RotationType.Pinch;

    // Create a PXCMSenseManager instance.
    sm = PXCMSenseManager.CreateInstance();
    if (sm == null)
    {
        return;
    }

    // Enable hand tracking; bail out if the module cannot be enabled.
    if (sm.EnableHand() != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        return;
    }

    // Hand module and hand data interface instances.
    hand = sm.QueryHand();
    hand_data = hand.CreateOutput();

    // Create a hand configuration instance and configure it.
    hcfg = hand.CreateActiveConfiguration();
    hcfg.EnableAllAlerts();
    hcfg.SubscribeAlert(OnFiredAlert);
    hcfg.EnableNormalizedJoints(true);
    hcfg.ApplyChanges();
    hcfg.Dispose();

    // Initialize the execution pipeline.
    if (sm.Init() != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        OnDisable();
    }
}
/// <summary>
/// Creates the SenseManager, lists the available devices, enables the
/// depth stream with default parameters, initializes the pipeline, and
/// reads back the device information. Throws on any failure.
/// </summary>
private void Initialize()
{
    // Create the SenseManager.
    senseManager = PXCMSenseManager.CreateInstance();
    if (senseManager == null)
    {
        throw new Exception("SenseManagerを生成できませんでした。");
    }

    // Enumerate the usable devices.
    PopulateDevice();

    // Enable the depth stream (0 = use the device defaults).
    var status = senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, 0, 0);
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("Depthストリームの有効化に失敗しました");
    }

    // Initialize the pipeline.
    status = senseManager.Init();
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("初期化に失敗しました");
    }

    // Fetch the device information.
    GetDeviceInfo();
}
/// <summary>
/// Sets up the RealSense pipeline with the Blob module enabled, switches
/// the device to mirrored output, and initializes blob tracking. Shows
/// the error and closes the window on failure.
/// </summary>
private void Initialize()
{
    try
    {
        // Create the SenseManager.
        senseManager = PXCMSenseManager.CreateInstance();

        // Enable the blob module.
        if (senseManager.EnableBlob() < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("Blobの有効化に失敗しました");
        }

        // Initialize the pipeline.
        if (senseManager.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("初期化に失敗しました");
        }

        // Mirror the displayed image.
        senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(
            PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        // Initialize blob tracking.
        InitializeBlob();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Starts this instance: creates the SenseManager, optionally enables
/// face tracking (with pose and, optionally, expressions), emotion
/// detection, and color streaming, then initializes the pipeline with a
/// frame callback and begins streaming frames.
/// </summary>
public void Start()
{
    if (_sm != null)
    {
        throw new ResearchException("Camera is already started.");
    }

    _sm = PXCMSenseManager.CreateInstance();

    // Configure face detection.
    if (EnableFace)
    {
        _sm.EnableFace();
        var faceModule = _sm.QueryFace();
        using (PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration())
        {
            faceConfig.EnableAllAlerts();
            faceConfig.pose.isEnabled = true;
            faceConfig.pose.maxTrackedFaces = 4;

            if (EnableExpression)
            {
                PXCMFaceConfiguration.ExpressionsConfiguration expression = faceConfig.QueryExpressions();
                expression.Enable();
                expression.EnableAllExpressions();
            }

            // BUG FIX: ApplyChanges() was previously only called inside the
            // EnableExpression branch, so the alert/pose configuration was
            // silently dropped when expressions were disabled. Apply it
            // unconditionally.
            faceConfig.ApplyChanges();
        }
    }

    if (EnableEmotion)
    {
        // Configure emotion detection.
        _sm.EnableEmotion();
    }

    if (EnableStreaming)
    {
        // Configure color streaming (depth/IR can be enabled the same way).
        _sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);
    }

    // Event handler for data callbacks.
    var handler = new PXCMSenseManager.Handler
    {
        onModuleProcessedFrame = OnModuleProcessedFrame
    };

    _sm.Init(handler);

    // GO.
    Debug.WriteLine("{0} Starting streaming.", Time());
    _sm.StreamFrames(false);
}
// RealSense methods ------------------------------------------------------------------
/// <summary>
/// Initializes the RealSense pipeline: color and depth streams, hand
/// detection, mirroring, the projection object, and hand tracking.
/// Returns true on success; logs the error and returns false otherwise.
/// </summary>
private bool InitializeRealSense()
{
    try
    {
        // Create the SenseManager.
        senseManager = PXCMSenseManager.CreateInstance();

        // Enable the color stream.
        var sts = senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR,
            COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("Colorストリームの有効化に失敗しました");
        }

        // Enable the depth stream.
        sts = senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH,
            DEPTH_WIDTH, DEPTH_HEIGHT, DEPTH_FPS);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("Depthストリームの有効化に失敗しました");
        }

        // Enable hand detection.
        sts = senseManager.EnableHand();
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("手の検出の有効化に失敗しました");
        }

        // Initialize the pipeline. Module instances are only created once
        // Init() succeeds, so per-module configuration happens afterwards.
        sts = senseManager.Init();
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("パイプラインの初期化に失敗しました");
        }

        // Mirror the displayed image.
        senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(
            PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        // Grab the device and create the coordinate-projection object.
        device = senseManager.captureManager.device;
        projection = device.CreateProjection();

        // Initialize hand tracking.
        InitializeHandTracking();

        return true;
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        return false;
    }
}
/// <summary>
/// Device-specific implementation of Connect.
/// Connects the camera: scans for devices and profiles, creates the SDK
/// pipeline, selects fixed color/depth/IR profiles, enables the
/// corresponding streams, initializes the pipeline, and activates the
/// intensity and depth channels.
/// </summary>
/// <remarks>This method is implicitely called by <see cref="Camera.Connect"/> inside a camera lock.</remarks>
/// <seealso cref="Camera.Connect"/>
protected override void ConnectImpl()
{
    if (deviceInfo.Count == 0)
    {
        ScanForCameras();
    }
    if (deviceInfo.Count == 0)
    {
        log.Error(Name + "No device found.");
        return;
    }

    int deviceIndex = 0;
    ScanForProfiles(deviceIndex);

    /* Create an instance of the PXCSenseManager interface */
    pp = PXCMSenseManager.CreateInstance();
    if (pp == null)
    {
        log.Error(Name + "Failed to create an SDK pipeline object");
        return;
    }

    pp.captureManager.FilterByDeviceInfo(deviceInfo[deviceIndex]);

    //TODO: change this to work with properties
    currentColorProfile = "YUY2 1920x1080x30";
    currentDepthProfile = "DEPTH 640x480x60";
    currentIRProfile = "Y8 640x480x60";

    PXCMCapture.Device.StreamProfileSet currentProfileSet = new PXCMCapture.Device.StreamProfileSet();
    currentProfileSet[PXCMCapture.StreamType.STREAM_TYPE_COLOR] = profiles[currentColorProfile];
    currentProfileSet[PXCMCapture.StreamType.STREAM_TYPE_DEPTH] = profiles[currentDepthProfile];
    currentProfileSet[PXCMCapture.StreamType.STREAM_TYPE_IR] = profiles[currentIRProfile];

    /* Set Color & Depth Resolution: enable every stream that has a profile */
    for (int s = 0; s < PXCMCapture.STREAM_LIMIT; s++)
    {
        PXCMCapture.StreamType st = PXCMCapture.StreamTypeFromIndex(s);
        PXCMCapture.Device.StreamProfile info = currentProfileSet[st];
        if (info.imageInfo.format != 0)
        {
            // Idiom: 'float' instead of the 'Single' alias.
            float fps = info.frameRate.max;
            pp.EnableStream(st, info.imageInfo.width, info.imageInfo.height, fps);
        }
    }

    // Readability fix: the original success branch was empty
    // ("if (Init() >= OK) { } else { log }"); log only on failure.
    if (pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        log.Error(Name + "An error occured.");
    }

    ActivateChannel(ChannelNames.Intensity);
    ActivateChannel(ChannelNames.ZImage);
}
/// <summary>
/// Configures the RealSense pipeline: color stream, person tracking,
/// depth-assisted single-face tracking (appearance/time strategy) and
/// blob tracking (up to 4 blobs within 2 m), then initializes the
/// SenseManager, creates the module outputs, and mirrors the image.
/// Shows an error dialog and rethrows when the camera cannot be set up.
/// </summary>
private void ConfigureRealSense()
{
    try
    {
        // Create the SenseManager instance.
        sm = PXCMSenseManager.CreateInstance();

        // Enable the color stream.
        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, ImageWidth, ImageHeight, 30);

        // Enable person tracking.
        sm.EnablePersonTracking();
        personModule = sm.QueryPersonTracking();
        PXCMPersonTrackingConfiguration personConfig = personModule.QueryConfiguration();
        personConfig.SetTrackedAngles(PXCMPersonTrackingConfiguration.TrackingAngles.TRACKING_ANGLES_ALL);

        // Enable the face module. (Skeleton tracking is not supported on the R200.)
        sm.EnableFace();
        PXCMFaceModule faceModule = sm.QueryFace();
        PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();
        faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
        faceConfig.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_APPEARANCE_TIME;
        faceConfig.detection.maxTrackedFaces = 1;
        faceConfig.ApplyChanges();

        // Enable blob tracking.
        sm.EnableBlob();
        PXCMBlobModule blobModule = sm.QueryBlob();
        PXCMBlobConfiguration blobConfig = blobModule.CreateActiveConfiguration();
        blobConfig.SetMaxBlobs(4);       // 4 is the max
        blobConfig.SetMaxDistance(2000); // in mm's
        blobConfig.ApplyChanges();

        // Initialize the SenseManager and create the module outputs.
        sm.Init();
        faceData = faceModule.CreateOutput();
        blobData = blobModule.CreateOutput();

        // Mirror the image.
        sm.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        // Release local configuration/module resources.
        personConfig.Dispose();
        faceConfig.Dispose();
        faceModule.Dispose();
        blobConfig.Dispose();
        blobModule.Dispose();
    }
    catch (Exception)
    {
        MessageBox.Show("Unable to configure the RealSense camera. Please make sure a R200 camera is connected.", "System Error");
        throw;
    }
}
/// <summary>
/// Initializes the pipeline: creates the SenseManager, enables the color
/// stream, configures face tracking, and builds the rectangles and text
/// blocks used to visualize detected faces and expressions.
/// Shows the error and closes the window on failure.
/// </summary>
private void Initialize()
{
    try
    {
        // Create the SenseManager.
        senceManager = PXCMSenseManager.CreateInstance();
        if (senceManager == null)
        {
            throw new Exception("SenseManagerの生成に失敗しました");
        }

        // Enable the color stream.
        pxcmStatus sts = senceManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR,
            COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("カラーストリームの取得に失敗しました");
        }

        InitializeFace();

        // Create the rectangles used to draw face-detection results.
        rect = new Rectangle[DETECTION_MAXFACES];
        for (int i = 0; i < DETECTION_MAXFACES; i++)
        {
            rect[i] = new Rectangle();
            TranslateTransform transform = new TranslateTransform(COLOR_WIDTH, COLOR_HEIGHT);
            rect[i].Width = 10;
            rect[i].Height = 10;
            rect[i].Stroke = Brushes.Blue;
            rect[i].StrokeThickness = 3;
            rect[i].RenderTransform = transform;
            CanvasForRect.Children.Add(rect[i]);
        }

        // Create the text blocks used to display expression results.
        tb = new TextBlock[EXPRESSION_MAXFACES, 3];
        for (int i = 0; i < EXPRESSION_MAXFACES; i++)
        {
            for (int j = 0; j < 3; j++)
            {
                tb[i, j] = new TextBlock();
                tb[i, j].Width = 200;
                tb[i, j].Height = 27;
                tb[i, j].Foreground = new SolidColorBrush(Colors.Red);
                tb[i, j].FontSize = 20;
                CanvasPoint.Children.Add(tb[i, j]);
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.StackTrace);
        MessageBox.Show("Init:" + ex.Message);
        Close();
    }
}
/// <summary>
/// Creates the PXCMSenseManager instance, logging an error when the SDK
/// fails to provide one.
/// </summary>
void InitializeSenseManager()
{
    psm = PXCMSenseManager.CreateInstance();
    if (psm == null)
    {
        Debug.LogError("SenseManager Initialization Failed");
    }
}
/// <summary>
/// Creates the SenseManager and caches the session it owns.
/// Throws when the SDK cannot provide a SenseManager instance.
/// </summary>
protected void GetSessionAndSenseManager()
{
    this.senseManager = PXCMSenseManager.CreateInstance();
    if (this.senseManager == null)
    {
        throw new Exception("Could not create Sense Manager.");
    }

    // The session is owned by the SenseManager.
    this.session = this.senseManager.session;
}
/// <summary>
/// Creates the SenseManager, enables the 640x480 depth stream, and
/// initializes the pipeline. <c>Status</c> reflects the first failure
/// (stream enabling or pipeline initialization), or success.
/// </summary>
public DepthData()
{
    sm = PXCMSenseManager.CreateInstance();
    Status = sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);

    // BUG FIX: the Init() status was previously discarded, so a pipeline
    // failure left Status looking successful. Record it unless an earlier
    // error is already stored.
    pxcmStatus initStatus = sm.Init();
    if (Status >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Status = initStatus;
    }
}
/// <summary>
/// Initializes the RealSense pipeline: color and depth streams, hand
/// detection, mirroring, the projection object, and hand tracking.
/// Shows the error and closes the window on failure.
/// </summary>
private void Initialize()
{
    try
    {
        // Create the SenseManager.
        senseManager = PXCMSenseManager.CreateInstance();

        // Enable the color stream.
        var sts = senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR,
            COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("カラーストリームの有効化に失敗しました");
        }

        // Enable the depth stream.
        sts = senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH,
            DEPTH_WIDTH, DEPTH_HEIGHT, DEPTH_FPS);
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("Depthストリームの有効化に失敗しました");
        }

        // Enable hand detection.
        sts = senseManager.EnableHand();
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("手の検出の有効化に失敗しました");
        }

        // Initialize the pipeline.
        sts = senseManager.Init();
        if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            throw new Exception("初期化に失敗しました");
        }

        // Grab the device, mirror the image, and create the projection object.
        var device = senseManager.QueryCaptureManager().QueryDevice();
        device.SetMirrorMode(
            PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
        projection = device.CreateProjection();

        // Initialize hand tracking.
        InitializeHandTracking();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Window-loaded handler: wires up the data context, creates the
/// SenseManager with a 1280x720 color stream and face tracking, then
/// starts the camera and the polling task.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    this.DataContext = this;

    SenseManager = PXCMSenseManager.CreateInstance();
    SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720);
    SenseManager.EnableFace();

    InitializeCamera();
    ConfigurePollingTask();
}
/// <summary>
/// Configures the RealSense pipeline for face tracking with expressions
/// and facial recognition backed by a persistent database, then
/// initializes the SenseManager and mirrors the camera image.
/// </summary>
private void ConfigureRealSense()
{
    PXCMFaceModule faceModule;
    PXCMFaceConfiguration faceConfig;

    // Start the SenseManager and session
    senseManager = PXCMSenseManager.CreateInstance();

    // Enable the color stream
    senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
    //senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 550, 550, 30);

    // Enable the face module
    senseManager.EnableFace();
    //senseManager.EnableHand();
    faceModule = senseManager.QueryFace();
    faceConfig = faceModule.CreateActiveConfiguration();

    // Configure for 3D face tracking (if camera cannot support depth it will revert to 2D tracking)
    faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);

    // Enable expression tracking for all expressions.
    expressionConfiguration = faceConfig.QueryExpressions();
    expressionConfiguration.Enable();
    expressionConfiguration.EnableAllExpressions();

    // Enable facial recognition
    recognitionConfig = faceConfig.QueryRecognition();
    recognitionConfig.Enable();

    // Create a recognition database.
    // NOTE(review): recognitionDesc.maxUsers is assigned before the call, but
    // CreateStorage takes the descriptor as an 'out' parameter and overwrites
    // it — the maxUsers value set here may never reach the SDK. Confirm
    // against the PXCMFaceConfiguration.RecognitionConfiguration docs.
    PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc recognitionDesc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
    recognitionDesc.maxUsers = DatabaseUsers;
    recognitionConfig.CreateStorage(DatabaseName, out recognitionDesc);
    recognitionConfig.UseStorage(DatabaseName);
    LoadDatabaseFromFile();
    // Continuous registration: faces are registered as they are recognized.
    recognitionConfig.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

    // Apply changes and initialize
    faceConfig.ApplyChanges();
    senseManager.Init();
    faceData = faceModule.CreateOutput();

    // Mirror image
    senseManager.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

    // Release resources
    faceConfig.Dispose();
    faceModule.Dispose();
}
private PXCMFaceModule faceAnalyzer; // FaceModule instance

/// <summary>
/// Use this for initialization.
/// Unity calls Start on the frame the script is enabled, just before any
/// Update. Creates the SenseManager, enables the 640x480 color stream and
/// face tracking, initializes the pipeline, then configures the face
/// module (detection, landmarks, pose, mouth-open expression, all alerts).
/// </summary>
void Start()
{
    faceRenderer = gameObject.GetComponent<FaceRenderer>();

    // Create a PXCMSenseManager instance.
    psm = PXCMSenseManager.CreateInstance();
    if (psm == null)
    {
        Debug.LogError("SenseManager Initialization Failed");
        return;
    }

    // Enable the color stream of size 640x480.
    psm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);

    // Enable the face tracking module.
    sts = psm.EnableFace();
    if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Debug.LogError("PXCSenseManager.EnableFace: " + sts);
    }

    // Retrieve the face module instance to configure.
    faceAnalyzer = psm.QueryFace();
    if (faceAnalyzer == null)
    {
        Debug.LogError("PXCSenseManager.QueryFace");
    }

    // Initialize the execution pipeline.
    sts = psm.Init();
    if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Debug.LogError("PXCMSenseManager.Init Failed");
        OnDisable();
        return;
    }

    // Retrieve a PXCMFaceConfiguration instance to enable gestures and alerts.
    PXCMFaceConfiguration config = faceAnalyzer.CreateActiveConfiguration();
    config.detection.isEnabled = true; // 3D detection is the default tracking mode.
    config.landmarks.isEnabled = true;
    config.pose.isEnabled = true;
    config.QueryExpressions().Enable();
    config.QueryExpressions().EnableExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN);
    config.EnableAllAlerts();
    config.ApplyChanges();
    config.Dispose();
}
/// <summary>
/// Starts the RealSense touchless controller: creates the SenseManager
/// (optionally replaying a file given on the command line), enables the
/// touchless controller, initializes the pipeline, and subscribes to UX
/// events. Exits the process on any failure.
/// </summary>
private void StartRealSense()
{
    Console.WriteLine("Starting Touchless Controller");
    pxcmStatus rc;

    // Create the SenseManager.
    psm = PXCMSenseManager.CreateInstance();

    // BUG FIX: '+' binds tighter than '==', so the original expression
    // ("..." + psm == null ? "failed" : "success") compared the concatenated
    // string to null and always printed "success". Parenthesize the
    // conditional so the null check is on psm itself.
    Console.WriteLine("Creating SenseManager: " + (psm == null ? "failed" : "success"));
    if (psm == null)
    {
        MessageBox.Show("Failed to create SenseManager!", "Failed", MessageBoxButton.OK, MessageBoxImage.Error);
        Environment.Exit(-1);
    }

    // Work from a file if a filename is given as a command line argument.
    string[] args = Environment.GetCommandLineArgs();
    if (args.Length > 1)
    {
        psm.captureManager.SetFileName(args[1], false);
    }

    // Enable the touchless controller in the multimodal pipeline.
    rc = psm.EnableTouchlessController(null);
    Console.WriteLine("Enabling Touchless Controller: " + rc.ToString());
    if (rc != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        MessageBox.Show("Failed to enable touchless controller!", "Failed", MessageBoxButton.OK, MessageBoxImage.Error);
        Environment.Exit(-1);
    }

    // Initialize the pipeline.
    PXCMSenseManager.Handler handler = new PXCMSenseManager.Handler();
    rc = psm.Init(handler);
    Console.WriteLine("Initializing the pipeline: " + rc.ToString());
    if (rc != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        MessageBox.Show("Failed to initialize the pipeline!", "Failed", MessageBoxButton.OK, MessageBoxImage.Error);
        Environment.Exit(-1);
    }

    // Get the touchless controller.
    ptc = psm.QueryTouchlessController();
    if (ptc == null)
    {
        MessageBox.Show("Failed to get touchless controller!", "Failed", MessageBoxButton.OK, MessageBoxImage.Error);
        Environment.Exit(-1);
    }

    ptc.SubscribeEvent(new PXCMTouchlessController.OnFiredUXEventDelegate(OnTouchlessControllerUXEvent));
}
/// <summary>
/// Starts the RealSense pipeline using either head (face) tracking or
/// hand tracking, depending on the "UseHead" app setting, creates 2D
/// quadratic smoothers for the tracked coordinates, and launches the
/// processing thread.
/// </summary>
private void StartRealSense()
{
    bool useHead = bool.Parse(ConfigurationManager.AppSettings["UseHead"]);

    // Instantiate and initialize the SenseManager.
    senseManager = PXCMSenseManager.CreateInstance();

    if (useHead)
    {
        // Configure the face module with expressions enabled.
        senseManager.EnableFace();
        face = senseManager.QueryFace();
        faceConfig = face.CreateActiveConfiguration();
        faceConfig.detection.isEnabled = true;
        faceConfig.QueryExpressions().Enable();
        faceConfig.ApplyChanges();
    }
    else
    {
        // Enable hand tracking.
        senseManager.EnableHand();

        // Get an instance of the hand module.
        hand = senseManager.QueryHand();

        // Configure it: no alerts, tracked joints, stabilizer on.
        var cursorConfig = hand.CreateActiveConfiguration();
        cursorConfig.DisableAllAlerts();
        cursorConfig.EnableTrackedJoints(true);
        cursorConfig.EnableStabilizer(true);
        cursorConfig.ApplyChanges();
    }

    senseManager.Init();

    // Create the smoothers used to steady the tracked coordinates.
    senseManager.session.CreateImpl<PXCMSmoother>(out smoother);
    smoother2D = smoother.Create2DQuadratic(.5F);
    smoother2D2 = smoother.Create2DQuadratic(1);

    // Start the worker thread.
    processingThread = new Thread(new ThreadStart(ProcessingThread));
    processingThread.Start();
}
/// <summary>
/// Creates the SenseManager and, when the face module initializes
/// successfully, starts the update timer. Any exception message is
/// surfaced through the Message property.
/// </summary>
public void RSStart()
{
    try
    {
        this.SenseManager = PXCMSenseManager.CreateInstance();

        if (InitializeFace() >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            this.Timer.Start();
        }
    }
    catch (Exception ex)
    {
        this.Message = ex.Message;
    }
}
/// <summary>
/// Unity initialization: sets up 640x480 depth and color streams, enables
/// hand analysis, initializes the pipeline, creates the hand data and
/// configuration (all gestures enabled), and caches the hand collider
/// objects and the main camera.
/// </summary>
void Start()
{
    // Create a PXCMSenseManager instance.
    psm = PXCMSenseManager.CreateInstance();
    if (psm == null)
    {
        Debug.LogError("SenseManager Init Failed");
        return;
    }

    // Enable the depth and colour streams.
    psm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 640, 480);
    psm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480);

    // Enable hand analysis.
    pxcmStatus sts = psm.EnableHand();
    if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Debug.LogError("SenseManager Hand Init Failed");
        OnDisable();
        return;
    }

    handModule = psm.QueryHand();

    // Initialise the execution pipeline.
    sts = psm.Init();
    if (sts != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Debug.LogError("SenseManager Pipeline Init Failed");
        OnDisable();
        return;
    }

    // Create the hand data and configuration; track every gesture.
    handData = handModule.CreateOutput();
    handConfig = handModule.CreateActiveConfiguration();
    handConfig.EnableAllGestures();
    handConfig.ApplyChanges();

    // Collect the capsule colliders that represent the hands.
    foreach (CapsuleCollider capsule in GetComponentsInChildren<CapsuleCollider>())
    {
        hands.Add(capsule.gameObject);
    }

    mainCamera = GetComponentInChildren<Camera>();
}
/// <summary>
/// Configures the RealSense pipeline: color stream, person tracking, and
/// depth-assisted single-face tracking (appearance/time strategy), then
/// initializes the SenseManager, creates the face output, and mirrors the
/// image. Shows an error dialog and rethrows when the camera cannot be
/// configured.
/// </summary>
private void ConfigureRealSense()
{
    my.init();
    child.Start();

    try
    {
        // Create the SenseManager instance.
        sm = PXCMSenseManager.CreateInstance();

        // Enable the color stream.
        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, ImageWidth, ImageHeight, 30);

        // Enable person tracking.
        sm.EnablePersonTracking();
        personModule = sm.QueryPersonTracking();
        PXCMPersonTrackingConfiguration personConfig = personModule.QueryConfiguration();
        personConfig.SetTrackedAngles(PXCMPersonTrackingConfiguration.TrackingAngles.TRACKING_ANGLES_ALL);

        // Enable the face module.
        sm.EnableFace();
        PXCMFaceModule faceModule = sm.QueryFace();
        PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();
        faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
        faceConfig.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_APPEARANCE_TIME;
        faceConfig.detection.maxTrackedFaces = 1;

        // Apply changes and initialize the SenseManager.
        faceConfig.ApplyChanges();
        sm.Init();
        faceData = faceModule.CreateOutput();

        // Mirror the image.
        sm.QueryCaptureManager().QueryDevice().SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);

        // Release local configuration/module resources.
        personConfig.Dispose();
        faceConfig.Dispose();
        faceModule.Dispose();
    }
    catch (Exception)
    {
        // Minimal handling for this sample: hint that the camera is missing,
        // then rethrow.
        MessageBox.Show("Unable to configure the RealSense camera. Please make sure a R200 camera is connected.", "System Error");
        throw;
    }
}