// Whole-frame update: acquire a frame, refresh face data, release the frame.
private void updateFrame()
{
    // Non-blocking acquire: do not wait for every module to finish.
    if (senceManager.AcquireFrame(false) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Refresh the face-tracking data for this frame.
        updateFaceFrame();

        // Hand the frame back so the pipeline can process the next one.
        senceManager.ReleaseFrame();
    }
}
/// <summary>
/// Grab the blob data every frame: collects the extremity points of every
/// tracked blob (mirrored on both axes) and draws points/contours.
/// </summary>
void Update()
{
    if (instance.AcquireFrame(true) == pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        /* To store all blob points */
        blobPointsPos = new List <Vector2>();
        PXCMCapture.Sample sample = instance.QuerySample();
        if (sample != null && sample.depth != null)
        {
            PXCMImage.ImageInfo info = sample.depth.QueryInfo();
            if (blobData != null)
            {
                blobData.Update();
                int numblobs = blobData.QueryNumberOfBlobs();
                // BUG FIX: valid blob indices are 0..numblobs-1. The original
                // used "i <= numblobs", querying one blob past the end and
                // over-indexing pointOuter[i].
                for (int i = 0; i < numblobs; i++)
                {
                    PXCMBlobData.IBlob pBlob;
                    if (blobData.QueryBlobByAccessOrder(i, PXCMBlobData.AccessOrderType.ACCESS_ORDER_NEAR_TO_FAR, out pBlob) == pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        // Six characteristic points of the blob.
                        Vector3 centerPoint = pBlob.QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CENTER);
                        Vector3 topPoint = pBlob.QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_TOP_MOST);
                        Vector3 bottomPoint = pBlob.QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_BOTTOM_MOST);
                        Vector3 leftPoint = pBlob.QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_LEFT_MOST);
                        Vector3 rightPoint = pBlob.QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_RIGHT_MOST);
                        Vector3 closestPoint = pBlob.QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CLOSEST);

                        // Negate x/y to mirror the camera view.
                        blobPointsPos.Add(new Vector2(centerPoint.x * -1, centerPoint.y * -1));
                        blobPointsPos.Add(new Vector2(topPoint.x * -1, topPoint.y * -1));
                        blobPointsPos.Add(new Vector2(bottomPoint.x * -1, bottomPoint.y * -1));
                        blobPointsPos.Add(new Vector2(leftPoint.x * -1, leftPoint.y * -1));
                        blobPointsPos.Add(new Vector2(rightPoint.x * -1, rightPoint.y * -1));
                        blobPointsPos.Add(new Vector2(closestPoint.x * -1, closestPoint.y * -1));

                        DisplayPoints();

                        // Contour index 0 is the outer contour of the blob.
                        if (pBlob.QueryContourPoints(0, out pointOuter[i]) == pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            DisplayContour(pointOuter[i], i, numblobs);
                        }
                    }
                }
            }
        }
        instance.ReleaseFrame();
    }
}
// Background fetch loop: initializes the sense manager, then pumps frames
// until 'fetching' is cleared or the device disconnects; shuts the manager
// down on exit.
private static void Fetcher()
{
    // It is yet unclear why we need the handler here
    // But without it, we get no data
    PXCMSenseManager.Handler handler = new PXCMSenseManager.Handler();
    senseManager.Init(handler); // Register the handler object

    while (fetching && senseManager.IsConnected())
    {
        // This is a blocking operation
        // We need to do this is a separate thread
        if (senseManager.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            break;
        }

        // NOTE(review): when the device reports disconnected here, the loop
        // continues WITHOUT releasing the frame acquired above — looks like a
        // frame leak; confirm against the AcquireFrame/ReleaseFrame contract.
        if (!senseManager.IsConnected())
        {
            continue;
        }

        senseManager.ReleaseFrame();

        //if (bodyTrackingEnabled)
        //    UpdateBodyTracking();

        //if (handTrackingEnabled)
        //    UpdateHandTracking();

        // NOTE(review): UpdateFace() runs AFTER ReleaseFrame(); confirm the
        // face module's output does not depend on the frame still being held.
        if (faceTrackingEnabled)
        {
            UpdateFace();
        }
    }

    // Tear down tracking and close the manager once the loop exits.
    if (senseManager != null)
    {
        //if (handTrackingEnabled)
        //    StopHandTracking();

        if (faceTrackingEnabled)
        {
            StopFaceTracking();
        }

        senseManager.Close();
        senseManager = null;
    }
}
// Depth loop: converts each depth frame to a bitmap for the UI,
// throttled to roughly two frames per second.
private void ProcessDepth()
{
    // Runs until AcquireFrame reports an error status.
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        PXCMCapture.Sample frame = senseManager.QuerySample();

        // Lock the raw depth pixels for reading.
        PXCMImage.ImageData depthPixels;
        frame.depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, out depthPixels);

        UpdateUI(ConvertDepthToBitmap(depthPixels, frame));

        // Unlock the pixels and hand the frame back to the pipeline.
        frame.depth.ReleaseAccess(depthPixels);
        senseManager.ReleaseFrame();

        // Throttle the loop (~2 fps).
        Thread.Sleep(TimeSpan.FromSeconds(0.5));
    }
}
// Frame pump: runs until frame acquisition fails, fanning each frame
// out to the color/depth/IR/hand consumers.
void updateThread()
{
    for (;;)
    {
        if (manager.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            break;
        }

        PXCMHandModule hand = manager.QueryHand();
        PXCMCapture.Sample sample = manager.QuerySample();

        // Deliver the frame to every consumer (stream id 0).
        this.newColorFrame(0, sample);
        this.newDepthFrame(0, sample);
        this.newIRFrame(0, sample);
        this.newHandFrame(hand);

        manager.ReleaseFrame();
    }
}
/// <summary>
/// RealSense update: grabs one frame, refreshes the color image and hand
/// data, and overlays one tinted band per frequency while the ensemble runs.
/// </summary>
private void UpdateRealSense()
{
    // AcquireFrame(true): wait until every enabled module has processed the
    // frame (update cadence differs between Color and Depth).
    if (senseManager.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        return;
    }

    // Show the color stream when a sample is available.
    PXCMCapture.Sample sample = senseManager.QuerySample();
    if (sample != null)
    {
        UpdateColorImage(sample.color);
    }

    // Refresh hand-tracking data.
    UpdateHandFrame();

    // Draw the performance areas while the ensemble timer is active.
    if (ensembleTimer.IsEnabled)
    {
        for (int k = 0; k < currentFreqs.Length; k++)
        {
            var bandBrush = new SolidColorBrush(Constants.colors[k]) { Opacity = 0.25 };
            AddRectangle(
                ColorImage.Height / currentFreqs.Length * k,
                ColorImage.Height / currentFreqs.Length,
                ColorImage.Width,
                Brushes.Black,
                1.0d,
                bandBrush);
        }
    }

    senseManager.ReleaseFrame();
}
/// <summary>
/// Polling loop: acquires frames and feeds the first face plus the raw
/// sample to ElaborateSample until cancellation is requested.
/// </summary>
private void PollingCode()
{
    PXCMFaceData faceData = FaceModule.CreateOutput();
    try
    {
        while (!PollingTaskCancellationToken.IsCancellationRequested)
        {
            if (SenseManager.AcquireFrame().IsSuccessful())
            {
                try
                {
                    faceData.Update();
                    var face = faceData.QueryFaceByIndex(0);
                    var sample = SenseManager.QuerySample();
                    ElaborateSample(sample, face);
                }
                finally
                {
                    // BUG FIX: the original skipped ReleaseFrame when
                    // cancellation was requested mid-iteration, leaking the
                    // frame acquired above. Always release what we acquired.
                    SenseManager.ReleaseFrame();
                }
            }
        }
    }
    finally
    {
        // BUG FIX: dispose the module output created by this method.
        faceData.Dispose();
    }
}
// Update is called once per frame: polls the sense manager and, when fresh
// hand data is ready, updates the slingshot joints.
void Update()
{
    if (sm == null)
    {
        return;
    }

    /* Wait until any frame data is available */
    if (sm.AcquireFrame(false, 0) == pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        /* Retrieve latest hand data, only update slingshot if ready */
        // BUG FIX: the original read "if (...) { ; }" followed by a
        // free-standing block, so TrackJoints ran unconditionally —
        // contradicting the comment above. Gate it on a successful Update().
        if (hand_data.Update() == pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            TrackJoints(hand_data);
        }

        /* Now, release the current frame so we can process the next frame */
        sm.ReleaseFrame();
    }
}
// Render-tick handler: refreshes hand data for the current frame.
// Any failure is surfaced in a message box and closes the window.
void CompositionTarget_Rendering(object sender, EventArgs e)
{
    try
    {
        // Non-blocking acquire; skip this tick if no frame is ready.
        pxcmStatus ret = senseManager.AcquireFrame(false);
        if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            return;
        }

        try
        {
            // Refresh hand-tracking data.
            UpdateHandFrame();
        }
        finally
        {
            // BUG FIX: release the frame even if UpdateHandFrame throws;
            // the original leaked the acquired frame on exception.
            senseManager.ReleaseFrame();
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
// Acquire one frame, refresh the depth view, then release the frame.
void UpdateFrame()
{
    // Wait until every enabled module has finished with the frame.
    if (senseManager.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        return;
    }

    // Display the depth stream when a sample is available.
    PXCMCapture.Sample sample = senseManager.QuerySample();
    if (sample != null)
    {
        UpdateDepthImage(sample.depth);
    }

    senseManager.ReleaseFrame();
}
/// <summary>
/// Minimal pipeline: streams the color image into the picture box.
/// Face and hand streams may be added later.
/// </summary>
public void NaivePipeline()
{
    PXCMSenseManager manager = m_form.Session.CreateSenseManager();
    if (manager == null)
    {
        throw new Exception("PXCMSenseManager null");
    }

    manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 360);
    manager.Init();

    // Stream until the form asks us to stop or frame acquisition fails.
    while (!m_form.Stopped)
    {
        if (manager.AcquireFrame(true).IsError())
        {
            break;
        }

        if (manager.IsConnected())
        {
            var sample = manager.QuerySample();
            if (sample == null)
            {
                manager.ReleaseFrame();
                continue;
            }

            // default is COLOR
            DisplayPicture(sample.color);
            m_form.UpdatePic();
        }

        manager.ReleaseFrame();
    }

    manager.Close();
    manager.Dispose();
}
// Gesture polling loop: waits for frames, checks the four supported
// gestures, and dispatches the matching handler onto the UI thread.
// Stops on cancellation or when frame acquisition fails.
private void ProcessInput(CancellationToken token)
{
    // Wait for available data
    while (!token.IsCancellationRequested && _senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        try
        {
            var handQuery = _senseManager.QueryHand();
            if (handQuery != null)
            {
                var handData = handQuery.CreateOutput();
                try
                {
                    // Get processing results
                    handData.Update();

                    PXCMHandData.GestureData gestureData;
                    if (handData.IsGestureFired("thumb_down", out gestureData))
                    {
                        Dispatcher.Invoke(ThumbDown);
                    }
                    else if (handData.IsGestureFired("thumb_up", out gestureData))
                    {
                        Dispatcher.Invoke(ThumbUp);
                    }
                    else if (handData.IsGestureFired("fist", out gestureData))
                    {
                        Dispatcher.Invoke(HandFist);
                    }
                    else if (handData.IsGestureFired("spreadfingers", out gestureData))
                    {
                        Dispatcher.Invoke(HandSpreadFingers);
                    }
                }
                finally
                {
                    // BUG FIX: dispose the per-frame output even when a
                    // dispatched handler throws (the original leaked it).
                    handData.Dispose();
                }
            }
        }
        finally
        {
            _senseManager.ReleaseFrame();
        }
    }
}
// Render-tick handler: shows the latest color frame.
// Failures are reported in a message box and close the window.
void CompositionTarget_Rendering(object sender, EventArgs e)
{
    try
    {
        // Non-blocking acquire; skip this tick when no frame is ready.
        if (senseManager.AcquireFrame(false) < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            return;
        }

        // Fetch the frame data and render the color image.
        PXCMCapture.Sample sample = senseManager.QuerySample();
        UpdateColorImage(sample.color);

        senseManager.ReleaseFrame();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
// Render-tick handler: shows the latest user-segmentation image.
// Failures are reported in a message box and close the window.
void CompositionTarget_Rendering(object sender, EventArgs e)
{
    try
    {
        // Non-blocking acquire; skip this tick when no frame is ready.
        if (senseManager.AcquireFrame(false) < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            return;
        }

        // Fetch and render the segmented image for this frame.
        var image = segmentation.AcquireSegmentedImage();
        UpdateSegmentationImage(image);

        senseManager.ReleaseFrame();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Wraps the sense manager as an observable stream of color-frame bitmaps,
/// polled at the color stream's frame rate.
/// </summary>
public static IObservable <Bitmap> AsObservable(this PXCMSenseManager manager)
{
    // TODO: refer stream fps
    // Polling period in milliseconds derived from the color stream FPS.
    var interval = Math.Floor(1000d / colorStreamFPS);
    return(Observable.Interval(TimeSpan.FromMilliseconds(interval))
           .Where(_ => manager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
           .Select(_ =>
    {
        try
        {
            PXCMCapture.Sample sample = manager.QuerySample();
            PXCMImage image = sample.color;
            PXCMImage.ImageData imageData;
            image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out imageData);
            try
            {
                return imageData.ToBitmap(0, image.info.width, image.info.height);
            }
            finally
            {
                image.ReleaseAccess(imageData);
            }
        }
        finally
        {
            // BUG FIX: the original only released the frame (and image
            // access) on the success path; an exception during conversion
            // leaked the acquired frame.
            manager.ReleaseFrame();
        }
    }));
}
/// <summary>
/// RealSense update: grabs one frame, refreshes the color image and hand
/// data, and overlays the five performance bands.
/// </summary>
private void UpdateRealSense()
{
    // AcquireFrame(true): wait until every enabled module has processed the
    // frame (update cadence differs between Color and Depth).
    if (senseManager.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        return;
    }

    // Show the color stream when a sample is available.
    PXCMCapture.Sample sample = senseManager.QuerySample();
    if (sample != null)
    {
        UpdateColorImage(sample.color);
    }

    // Refresh hand-tracking data.
    UpdateHandFrame();

    // Draw the five performance bands over the image.
    for (int k = 0; k < 5; k++)
    {
        var bandBrush = new SolidColorBrush(colors[k]) { Opacity = 0.50 };
        AddRectangle(
            imageColor.Height / 5 * k,
            imageColor.Height / 5,
            imageColor.Width,
            Brushes.Black,
            1.0d,
            bandBrush);
    }

    senseManager.ReleaseFrame();
}
// Streaming loop: converts each color frame to a bitmap for the UI and
// samples the hand module's "v_sign" gesture state.
private void ProcessingThread()
{
    // AcquireFrame/ReleaseFrame loop; exits when acquisition fails.
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        PXCMCapture.Sample frame = senseManager.QuerySample();

        // Lock the color pixels and convert them into a bitmap.
        PXCMImage.ImageData pixelData;
        frame.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out pixelData);
        Bitmap frameBitmap = pixelData.ToBitmap(0, frame.color.info.width, frame.color.info.height);

        // Pull the most recent gesture state from the hand module.
        hand = senseManager.QueryHand();
        if (hand != null)
        {
            handData = hand.CreateOutput();
            handData.Update();
            handWaving = handData.IsGestureFired("v_sign", out gestureData);
        }

        // Push the frame image to the user interface.
        UpdateUI(frameBitmap);

        // Tear down the per-frame resources, then release the frame.
        if (handData != null)
        {
            handData.Dispose();
        }
        frameBitmap.Dispose();
        frame.color.ReleaseAccess(pixelData);
        senseManager.ReleaseFrame();
    }
}
// Button handler: shows the SDK version, then grabs a single color+depth
// frame and displays both streams as WPF bitmaps.
private void button1_Click(object sender, RoutedEventArgs e)
{
    PXCMSession session = PXCMSession.CreateInstance();
    PXCMSession.ImplVersion version = session.QueryVersion();
    textBox1.Text = version.major.ToString() + "." + version.minor.ToString();

    // Configure a pipeline with color + depth at the default resolution.
    PXCMSenseManager sm = session.CreateSenseManager();
    sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 0, 0);
    sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, 0);
    sm.Init();

    // BUG FIX: the original ignored the AcquireFrame status and dereferenced
    // the sample unconditionally; only touch it when acquisition succeeded.
    pxcmStatus status = sm.AcquireFrame(true);
    if (status >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        PXCMCapture.Sample sample = sm.QuerySample();
        PXCMImage image = sample.color;
        PXCMImage dimage = sample.depth;

        // Lock both buffers for reading.
        PXCMImage.ImageData data;
        PXCMImage.ImageData data2;
        image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out data);
        dimage.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_RAW, out data2);

        // Wrap the buffers as 96-DPI WPF bitmaps and show them.
        WriteableBitmap wbm = data.ToWritableBitmap(0, image.info.width, image.info.height, 96.0, 96.0);
        WriteableBitmap wbm2 = data2.ToWritableBitmap(0, dimage.info.width, dimage.info.height, 96.0, 96.0);
        image1.Source = wbm;
        image2.Source = wbm2;

        image.ReleaseAccess(data);
        dimage.ReleaseAccess(data2);
        sm.ReleaseFrame();
    }

    sm.Close();
    // BUG FIX: the original closed but never disposed the sense manager.
    sm.Dispose();
    session.Dispose();
}
// Per-frame update: ticks the gesture cooldown, pulls a frame, refreshes
// hand data, runs the currently selected visual effect, then moves hands
// and handles gestures.
void Update()
{
    // Tick down the gesture cooldown timer.
    if (gestureCooldown > 0)
    {
        gestureCooldown -= Time.deltaTime;
    }

    if (psm == null)
    {
        return;
    }

    // Block for the next frame; skip this tick if acquisition fails.
    if (psm.AcquireFrame(true) != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        return;
    }

    drawCameraVision();
    handData.Update();

    switch (currentEffect)
    {
        case 2:
            // Joint-following effect.
            PXCMHandData.JointData[,] nodes = getHandInfo();
            followJoints(nodes);
            break;
        case 3:
            // point cloud
            createDepthMesh();
            break;
    }

    moveHands();
    handleGestures();

    // Release the frame
    psm.ReleaseFrame();
}
// Per-frame "theremin" update: tracks the two blobs nearest the camera and
// maps the left hand's height to tone frequency and the right hand's height
// to volume, then renders the color stream.
private void Update()
{
    // Start AcquireFrame-ReleaseFrame loop
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        PXCMCapture.Sample sample = senseManager.QuerySample();
        Bitmap colorBitmap;
        PXCMImage.ImageData colorData;

        blobData.Update();

        /*while (blobData.QueryNumberOfBlobs() < 2)
         * {
         * trackingDistance += 100;
         * blobConfig.SetMaxDistance(trackingDistance);
         * blobConfig.ApplyChanges();
         * blobData.Update();
         *
         * senseManager.ReleaseFrame();
         *
         * if(trackingDistance > 3000)
         * trackingDistance = 600;
         * }
         * {
         * }
         */

        // Fetch the two blobs nearest to the camera into blobList[0..1].
        for (int i = 0; i < 2; i++)
        {
            blobData.QueryBlobByAccessOrder(i, PXCMBlobData.AccessOrderType.ACCESS_ORDER_NEAR_TO_FAR, out blobList[i]);
        }

        // Only update hand coordinates when exactly two blobs are tracked.
        if (blobData.QueryNumberOfBlobs() == 2)
        {
            if (blobCoordinates[3, (int)cord.Y] == -1)
            {
                //smoothing
            }

            // The blob whose center has the larger x is taken as the LEFT
            // hand — NOTE(review): presumably a mirrored camera view; confirm.
            if (blobList[0].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CENTER).x > blobList[1].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CENTER).x)
            {
                blobCoordinates[(int)hand.LEFT, (int)cord.Y] = blobList[0].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CENTER).y;
                blobCoordinates[(int)hand.RIGHT, (int)cord.Y] = blobList[1].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CENTER).y;
                blobCoordinates[(int)hand.LEFT, (int)cord.X] = blobList[0].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CENTER).x;
                blobCoordinates[(int)hand.RIGHT, (int)cord.X] = blobList[1].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CENTER).x;
            }
            else
            {
                blobCoordinates[(int)hand.RIGHT, (int)cord.Y] = blobList[0].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CENTER).y;
                blobCoordinates[(int)hand.LEFT, (int)cord.Y] = blobList[1].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CENTER).y;
                blobCoordinates[(int)hand.RIGHT, (int)cord.X] = blobList[0].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CENTER).x;
                blobCoordinates[(int)hand.LEFT, (int)cord.X] = blobList[1].QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CENTER).x;
            }
        }

        // Tone output: end the current tone and begin the new one.
        frequency = (int)(blobCoordinates[(int)hand.LEFT, (int)cord.Y] * 1.8);
        volume = (500 - blobCoordinates[(int)hand.RIGHT, (int)cord.Y]) / 500;
        sineWaveProvider.Frequency = frequency;
        sineWaveProvider.Amplitude = volume;

        // Get color image data
        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out colorData);
        colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

        // Update UI
        Render(colorBitmap);

        // Release frame
        colorBitmap.Dispose();
        sample.color.ReleaseAccess(colorData);
        senseManager.ReleaseFrame();
    }
}
//********************************* Private functions *******************************************************************
// Main body of the streaming loop: sets up the selected algorithm module,
// initializes the manager, then pumps frames — tracking the elapsed time
// from stream timestamps — until m_stopped is set.
private void DoStreaming()
{
    this.m_stopped = false;
    InitStreamState();

    switch (m_algoOption)
    {
        // Face algorithm
        case AlgoOption.Face:
            this.faceModule = manager.QueryFace();
            if (faceModule == null)
            {
                MessageBox.Show("QueryFace failed");
                return;
            }
            InitFaceState();
            this.faceData = this.faceModule.CreateOutput();
            if (faceData == null)
            {
                MessageBox.Show("CreateOutput failed");
                return;
            }
            break;
    }

    if (manager.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
#if DEBUG
        System.Windows.Forms.MessageBox.Show("init failed");
#endif
        return;
    }

    while (!m_stopped)
    {
        //if (m_pause)
        //{
        //    System.Threading.Thread.Sleep(10);
        //    continue;
        //}
        if (manager.AcquireFrame(true).IsError())
        {
            break;
        }
        this.sample = manager.QuerySample();

        // Prefer the depth timestamp; fall back to the color timestamp.
        if (sample.depth != null)
        {
            this.m_timestamp = (sample.depth.timeStamp);
        }
        else if (sample.color != null)
        {
            this.m_timestamp = sample.color.timeStamp;
        }
        // Timestamps are divided by 10^7 to get seconds (i.e. 100 ns units).
        m_timestamp_sec = m_timestamp / 10000000;
        if (m_timestamp_sec_init == -1)
        {
            m_timestamp_sec_init = m_timestamp_sec;
        }

        // Show elapsed seconds on the label via a worker thread.
        // NOTE(review): this spawns a new thread on every frame — confirm intended.
        if (this.m_label != null)
        {
            //updateLabel(this.m_timestamp.ToString());
            System.Threading.Thread t1 = new System.Threading.Thread(updateLabel);
            t1.Start((m_timestamp_sec - m_timestamp_sec_init).ToString());
        }
        //OnTimeStampChanged(this.m_timestamp.ToString());

        // Run the native algorithm and cache the live data.
        faceData.Update();
        FacialLandmarks fl = this.GetFaceLandmarks();

        // Render the video stream when display is enabled.
        if (m_display)
        {
            this.DoRender();
        }
        manager.ReleaseFrame();
    }

    faceData.Dispose();
    manager.Dispose();
}
/// <summary>
/// Face-tracking pipeline: configures the RealSense manager and the face
/// module (detection + recognition), loads the recognition database, then
/// loops acquiring frames — raising OnFrame with the camera image and
/// running FindFace — until _Stop is set.
/// </summary>
private void FaceTrackingPipeline()
{
    IsDispose = false;
    OnStart?.Invoke(this, null);

    #region Manager Init
    realSenseManager = RealSenseObjects.Session.CreateSenseManager();
    if (realSenseManager == null)
    {
        MessageBox.Show(
            "PXCMSenseManager初始化失敗。",
            "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
        OnStop?.Invoke(this, null);
        return;
    }

    PXCMCaptureManager captureManager = realSenseManager.captureManager;
    if (captureManager == null)
    {
        MessageBox.Show(
            "PXCMCaptureManager初始化失敗。",
            "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
        OnStop?.Invoke(this, null);
        return;
    }
    #endregion

    #region 基本設定
    // Select the capture device.
    captureManager.FilterByDeviceInfo(Form.SelectedDevice);

    // Select the stream profile.
    captureManager.FilterByStreamProfiles(Form.SelectedDeviceStreamProfile);

    // Enable the face-tracking module.
    realSenseManager.EnableFace();
    PXCMFaceModule faceModule = realSenseManager.QueryFace();
    if (faceModule == null)
    {
        MessageBox.Show(
            "取得PXCMFaceModule失敗。",
            "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
        OnStop?.Invoke(this, null);
        return;
    }

    // Create the face-tracking configuration.
    moduleConfiguration = faceModule.CreateActiveConfiguration();
    if (moduleConfiguration == null)
    {
        MessageBox.Show(
            "建立PXCMFaceConfiguration失敗。",
            "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
        OnStop?.Invoke(this, null);
        return;
    }

    // Tracking-mode settings.
    moduleConfiguration.SetTrackingMode(Form.ModeType);
    moduleConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;
    moduleConfiguration.detection.isEnabled = true;
    moduleConfiguration.detection.maxTrackedFaces = 4; // Track at most 4 faces.
    moduleConfiguration.landmarks.isEnabled = false;
    moduleConfiguration.pose.isEnabled = false;

    recognitionConfig = moduleConfiguration.QueryRecognition();
    if (recognitionConfig == null)
    {
        MessageBox.Show(
            "建立RecognitionConfiguration失敗。",
            "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
        OnStop?.Invoke(this, null);
        return;
    }
    recognitionConfig.Enable();
    #endregion

    #region 讀取資料庫數據
    // Load the recognition database supplied by the form, when present.
    if (Form.FaceData != null)
    {
        recognitionConfig.SetDatabase(Form.FaceData);
        moduleConfiguration.ApplyChanges();
    }
    #endregion

    #region 預備啟動
    moduleConfiguration.EnableAllAlerts();
    //moduleConfiguration.SubscribeAlert(FaceAlertHandler);

    pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();
    Form.SetStatus("RealSenseManager初始化中");
    if (applyChangesStatus.IsError() || realSenseManager.Init().IsError())
    {
        MessageBox.Show(
            "RealSenseManager初始化失敗,請檢查設定正確。",
            "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
        OnStop?.Invoke(this, null);
        return;
    }
    #endregion

    using (moduleOutput = faceModule.CreateOutput())
    {
        PXCMCapture.Device.StreamProfileSet profiles;
        PXCMCapture.Device device = captureManager.QueryDevice();
        if (device == null)
        {
            MessageBox.Show(
                "取得設備失敗。",
                "初始化失敗", MessageBoxButtons.OK, MessageBoxIcon.Error);
            OnStop?.Invoke(this, null);
            return;
        }
        device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);

        #region Loop
        while (!_Stop)
        {
            // Busy-wait while paused, keeping the UI message pump alive.
            while (_Paush)
            {
                Application.DoEvents();
            }
            if (realSenseManager.AcquireFrame(true).IsError())
            {
                break;
            }
            var isConnected = realSenseManager.IsConnected();
            if (isConnected)
            {
                var sample = realSenseManager.QueryFaceSample();
                if (sample == null)
                {
                    realSenseManager.ReleaseFrame();
                    continue;
                }

                #region 畫面取出
                // IR mode uses the IR image; everything else uses color.
                PXCMImage image = null;
                if (Form.ModeType == PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR)
                {
                    image = sample.ir;
                }
                else
                {
                    image = sample.color;
                }
                #endregion

                moduleOutput.Update(); // Refresh recognition results.

                PXCMFaceConfiguration.RecognitionConfiguration recognition = moduleConfiguration.QueryRecognition();
                if (recognition == null)
                {
                    realSenseManager.ReleaseFrame();
                    continue;
                }

                #region 繪圖與事件
                OnFrame?.Invoke(this, new FaceRecognitionEventArgs()
                {
                    Image = ToBitmap(image)
                });
                FindFace(moduleOutput);
                #endregion
            }
            // Release the frame.
            realSenseManager.ReleaseFrame();
        }
        #endregion

        // Refresh the database buffer.
        //Buffer = moduleOutput.QueryRecognitionModule().GetDatabaseBuffer();
    }

    #region 釋放資源
    moduleConfiguration.Dispose();
    realSenseManager.Close();
    realSenseManager.Dispose();
    #endregion

    IsDispose = true;
    OnStop?.Invoke(this, null);
}
// Face-recognition streaming loop: converts each color frame to a bitmap,
// tracks the most recently detected face's bounding rectangle, handles
// register/unregister requests against the recognition database, and
// broadcasts the rectangle + user id to connected clients.
private void ProcessingThread()
{
    try
    {
        while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            PXCMCapture.Sample sample = senseManager.QuerySample();
            PXCMImage.ImageData colorData;
            sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
            Bitmap colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

            // Get face data
            if (faceData != null)
            {
                faceData.Update();
                numFacesDetected = faceData.QueryNumberOfDetectedFaces();
                if (numFacesDetected > 0)
                {
                    // Get the last detected face (index numFacesDetected - 1).
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(numFacesDetected - 1);

                    // Retrieve face location data
                    PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                    if (faceDetectionData != null)
                    {
                        PXCMRectI32 faceRectangle;
                        faceDetectionData.QueryBoundingRect(out faceRectangle);
                        faceRectangleHeight = faceRectangle.h;
                        faceRectangleWidth = faceRectangle.w;
                        faceRectangleX = faceRectangle.x;
                        faceRectangleY = faceRectangle.y;
                    }

                    // Process face recognition data
                    if (face != null)
                    {
                        // Retrieve the recognition data instance
                        recognitionData = face.QueryRecognition();

                        // Set the user ID and process register/unregister logic
                        if (recognitionData.IsRegistered())
                        {
                            userId = Convert.ToString(recognitionData.QueryUserID());
                            //if (lastUserId == userId)
                            // Notify only when a different user enters the view.
                            if (flagUserId != userId)
                            {
                                Actions.LoadUser(Convert.ToInt16(userId), 255, "userinview", true);
                                flagUserId = userId;
                            }
                            if (doUnregister)
                            {
                                recognitionData.UnregisterUser();
                                SaveDatabaseToFile();
                                doUnregister = false;
                            }
                        }
                        else
                        {
                            if (doRegister)
                            {
                                recognitionData.RegisterUser();

                                // Capture a jpg image of registered user
                                colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                                doRegister = false;
                            }
                            else
                            {
                                userId = "Unrecognized";
                            }
                        }
                    }
                }
                else
                {
                    userId = "No users in view";
                    // Reset the flag so the next detected user always triggers LoadUser.
                    flagUserId = "modifyed";
                }
            }

            // Release resources
            colorBitmap.Dispose();
            sample.color.ReleaseAccess(colorData);
            sample.color.Dispose();
            senseManager.ReleaseFrame();

            // Broadcast "x y w h" of the last face rectangle plus the user id.
            coords = faceRectangleX.ToString() + " " + faceRectangleY.ToString() + " " + faceRectangleWidth.ToString() + " " + faceRectangleHeight.ToString();
            Server.sendMsg(255, "rect", coords, userId);
        }
    }
    catch
    {
        // NOTE(review): all exceptions are swallowed here; the loop simply
        // ends with a console message. Confirm this best-effort behavior.
        Console.WriteLine("ERRO ProcessingThread");
    }
}
/// <summary>
/// Full face-tracking pipeline for the sample form: selects the device and
/// color resolution, configures the face module (detection, landmarks, pose,
/// expressions, pulse, recognition), loads the recognition database, then
/// streams frames until the form is stopped and persists database changes.
/// </summary>
public void SimplePipeline()
{
    PXCMSenseManager pp = m_form.Session.CreateSenseManager();
    if (pp == null)
    {
        throw new Exception("PXCMSenseManager null");
    }
    PXCMCaptureManager captureMgr = pp.captureManager;
    if (captureMgr == null)
    {
        throw new Exception("PXCMCaptureManager null");
    }

    var selectedRes = m_form.GetCheckedColorResolution();
    if (selectedRes != null && !m_form.IsInPlaybackState())
    {
        // Set active camera
        PXCMCapture.DeviceInfo deviceInfo;
        m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out deviceInfo);
        captureMgr.FilterByDeviceInfo(m_form.GetCheckedDeviceInfo());

        // activate filter only live/record mode , no need in playback mode
        var set = new PXCMCapture.Device.StreamProfileSet
        {
            color =
            {
                frameRate = selectedRes.Item2,
                imageInfo =
                {
                    format = selectedRes.Item1.format,
                    height = selectedRes.Item1.height,
                    width  = selectedRes.Item1.width
                }
            }
        };

        // Pulse estimation requires at least 1280x720 color.
        if (m_form.IsPulseEnabled() && (set.color.imageInfo.width < 1280 || set.color.imageInfo.height < 720))
        {
            captureMgr.FilterByStreamProfiles(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720, 0);
        }
        else
        {
            captureMgr.FilterByStreamProfiles(set);
        }
    }

    // Set Source & Landmark Profile Index
    if (m_form.IsInPlaybackState())
    {
        //pp.captureManager.FilterByStreamProfiles(null);
        captureMgr.SetFileName(m_form.GetFileName(), false);
        captureMgr.SetRealtime(false);
    }
    else if (m_form.GetRecordState())
    {
        captureMgr.SetFileName(m_form.GetFileName(), true);
    }

    // Set Module
    pp.EnableFace();
    PXCMFaceModule faceModule = pp.QueryFace();
    if (faceModule == null)
    {
        Debug.Assert(faceModule != null);
        return;
    }
    PXCMFaceConfiguration moduleConfiguration = faceModule.CreateActiveConfiguration();
    if (moduleConfiguration == null)
    {
        Debug.Assert(moduleConfiguration != null);
        return;
    }

    // Map the checked profile name back to its tracking mode.
    var checkedProfile = m_form.GetCheckedProfile();
    var mode = m_form.FaceModesMap.First(x => x.Value == checkedProfile).Key;
    moduleConfiguration.SetTrackingMode(mode);

    moduleConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;
    moduleConfiguration.detection.maxTrackedFaces = m_form.NumDetection;
    moduleConfiguration.landmarks.maxTrackedFaces = m_form.NumLandmarks;
    moduleConfiguration.pose.maxTrackedFaces = m_form.NumPose;

    PXCMFaceConfiguration.ExpressionsConfiguration econfiguration = moduleConfiguration.QueryExpressions();
    if (econfiguration == null)
    {
        throw new Exception("ExpressionsConfiguration null");
    }
    econfiguration.properties.maxTrackedFaces = m_form.NumExpressions;
    econfiguration.EnableAllExpressions();

    // Enable each sub-feature according to the form's checkboxes.
    moduleConfiguration.detection.isEnabled = m_form.IsDetectionEnabled();
    moduleConfiguration.landmarks.isEnabled = m_form.IsLandmarksEnabled();
    moduleConfiguration.pose.isEnabled = m_form.IsPoseEnabled();
    if (m_form.IsExpressionsEnabled())
    {
        econfiguration.Enable();
    }

    PXCMFaceConfiguration.PulseConfiguration pulseConfiguration = moduleConfiguration.QueryPulse();
    if (pulseConfiguration == null)
    {
        throw new Exception("pulseConfiguration null");
    }
    pulseConfiguration.properties.maxTrackedFaces = m_form.NumPulse;
    if (m_form.IsPulseEnabled())
    {
        pulseConfiguration.Enable();
    }

    qrecognition = moduleConfiguration.QueryRecognition();
    if (qrecognition == null)
    {
        throw new Exception("PXCMFaceConfiguration.RecognitionConfiguration null");
    }
    if (m_form.IsRecognitionChecked())
    {
        qrecognition.Enable();

        #region 臉部辨識資料庫讀取
        // Load the face-recognition database from disk when present.
        if (File.Exists(DatabasePath))
        {
            m_form.UpdateStatus("正在讀取資料庫", MainForm.Label.StatusLabel);
            List <RecognitionFaceData> faceData = null;
            FaceDatabaseFile.Load(DatabasePath, ref faceData, ref NameMapping);
            FaceData = faceData.ToArray();
            qrecognition.SetDatabase(FaceData);
        }
        #endregion
    }

    moduleConfiguration.EnableAllAlerts();
    moduleConfiguration.SubscribeAlert(FaceAlertHandler);

    pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();
    m_form.UpdateStatus("Init Started", MainForm.Label.StatusLabel);

    if (applyChangesStatus < pxcmStatus.PXCM_STATUS_NO_ERROR || pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        m_form.UpdateStatus("Init Failed", MainForm.Label.StatusLabel);
    }
    else
    {
        using (PXCMFaceData moduleOutput = faceModule.CreateOutput())
        {
            Debug.Assert(moduleOutput != null);
            PXCMCapture.Device.StreamProfileSet profiles;
            PXCMCapture.Device device = captureMgr.QueryDevice();
            if (device == null)
            {
                throw new Exception("device null");
            }
            device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);
            CheckForDepthStream(profiles, faceModule);

            m_form.UpdateStatus("Streaming", MainForm.Label.StatusLabel);
            m_timer = new FPSTimer(m_form);

            #region loop
            while (!m_form.Stopped)
            {
                if (pp.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }
                var isConnected = pp.IsConnected();
                DisplayDeviceConnection(isConnected);
                if (isConnected)
                {
                    var sample = pp.QueryFaceSample();
                    if (sample == null)
                    {
                        pp.ReleaseFrame();
                        continue;
                    }
                    // IR mode displays the IR image; all other modes show color.
                    switch (mode)
                    {
                        case PXCMFaceConfiguration.TrackingModeType.FACE_MODE_IR:
                            if (sample.ir != null)
                            {
                                DisplayPicture(sample.ir);
                            }
                            break;
                        default:
                            DisplayPicture(sample.color);
                            break;
                    }

                    moduleOutput.Update();
                    PXCMFaceConfiguration.RecognitionConfiguration recognition = moduleConfiguration.QueryRecognition();
                    if (recognition == null)
                    {
                        pp.ReleaseFrame();
                        continue;
                    }
                    if (recognition.properties.isEnabled)
                    {
                        UpdateRecognition(moduleOutput);
                    }

                    m_form.DrawGraphics(moduleOutput);
                    m_form.UpdatePanel();
                }
                pp.ReleaseFrame();
            }
            #endregion
        }

        // moduleConfiguration.UnsubscribeAlert(FaceAlertHandler);
        // moduleConfiguration.ApplyChanges();
        m_form.UpdateStatus("Stopped", MainForm.Label.StatusLabel);
    }

    #region 儲存臉部辨識資訊檔案
    // Persist the recognition database when it changed during the session.
    if (DatabaseChanged)
    {
        FaceDatabaseFile.Save(DatabasePath, FaceData.ToList(), NameMapping);
    }
    #endregion

    var dbm = new FaceDatabaseManager(pp);
    moduleConfiguration.Dispose();
    pp.Close();
    pp.Dispose();
}
// Streams color and depth for the 3D scanning sample: configures the
// capture source (live/record/playback), enables and configures the 3DScan
// module for the requested scanType ("Object", "Face", "Body", "Head",
// "Full"), then runs the preview/scan/reconstruct loop until stopped.
public void StreamColorDepth(String scanType) /* Stream Color and Depth Synchronously or Asynchronously */
{
    bool sts = true;
    PXCM3DScan.Configuration scan_config = new PXCM3DScan.Configuration();
    String statusString;

    /* Create an instance of the PXCSenseManager interface */
    PXCMSenseManager pp = PXCMSenseManager.CreateInstance();
    if (pp == null)
    {
        form.UpdateStatus("Failed to create sense manager");
        return;
    }
    if (pp.captureManager == null)
    {
        form.UpdateStatus("Capture manager does not exist");
        return;
    }

    // Playback/record use a file as source/sink.
    if (!form.IsModeLive())
    {
        pp.captureManager.SetFileName(form.GetFileName(), form.IsModeRecord());
    }

    /* Set Input Source */
    PXCMCapture.DeviceInfo dinfo2 = form.GetCheckedDevice();
    if (form.IsModeLive() || form.IsModeRecord())
    {
        pp.captureManager.FilterByDeviceInfo(dinfo2);
    }

    if (form.IsModeRecord())
    {
        // Delay recording frames until the scan starts
        pp.captureManager.SetPause(true);
    }
    else if (!form.IsModeLive())
    {
        // Disable real-time mode if we are playing back a file
        // to ensure that frames are not skipped.
        pp.captureManager.SetRealtime(false);
    }

    /* Set Color & Depth Resolution */
    PXCMCapture.Device.StreamProfile cinfo = form.GetColorConfiguration();
    if (cinfo.imageInfo.format != 0)
    {
        Single cfps = cinfo.frameRate.max;
        pp.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, cinfo.imageInfo.width, cinfo.imageInfo.height, cfps);
    }
    PXCMCapture.Device.StreamProfile dinfo = form.GetDepthConfiguration();
    if (dinfo.imageInfo.format != 0)
    {
        Single dfps = dinfo.frameRate.max;
        pp.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, dinfo.imageInfo.width, dinfo.imageInfo.height, dfps);
    }

    /* Initialization */
    FPSTimer timer = new FPSTimer(form);
    if (form.IsModeLive())
    {
        form.UpdateStatus("Initializing...");
    }

    /* Enable the 3D Scan video module */
    pxcmStatus result = pp.Enable3DScan();
    if (result != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        pp.Close();
        pp.Dispose();
        form.UpdateStatus("Enable3DScan() returned " + result);
        return;
    }

    /* Initialize the camera system */
    result = pp.Init();
    form.UpdateStatus("");
    device = pp.captureManager.device;
    if (result >= pxcmStatus.PXCM_STATUS_NO_ERROR && device != null)
    {
        // Track whether we temporarily disabled auto exposure/white balance
        // so the original settings can be restored after scanning.
        bool bAutoExpAndWBChanged = false;
        bool bAutoExposureEnabled = true;
        bool bAutoWhiteBalanceEnabled = true;

        /* Setup the scanning configuration */
        if (scanType == "Object")
        {
            scan_config.mode = PXCM3DScan.ScanningMode.OBJECT_ON_PLANAR_SURFACE_DETECTION;
        }
        else if (scanType == "Face")
        {
            scan_config.mode = PXCM3DScan.ScanningMode.FACE;
        }
        else if (scanType == "Body")
        {
            scan_config.mode = PXCM3DScan.ScanningMode.BODY;
        }
        else if (scanType == "Head")
        {
            scan_config.mode = PXCM3DScan.ScanningMode.HEAD;
        }
        else if (scanType == "Full")
        {
            scan_config.mode = PXCM3DScan.ScanningMode.VARIABLE;
        }

        /* Select the Targeting Options */
        scan_config.options = PXCM3DScan.ReconstructionOption.NONE;
        if (form.isSolidificationSelected())
        {
            scan_config.options |= (PXCM3DScan.ReconstructionOption.SOLIDIFICATION);
        }
        if (form.isTextureSelected())
        {
            scan_config.options |= (PXCM3DScan.ReconstructionOption.TEXTURE);
        }
        if (form.isLandmarksSelected())
        {
            scan_config.options |= (PXCM3DScan.ReconstructionOption.LANDMARKS);
        }
        //scan_config.useMarker = form.isUseMarkerChecked();
        scan_config.flopPreviewImage = form.isFlopPreviewImageSelected();

        /* Try to initialize the scanning system */
        PXCM3DScan scan = pp.Query3DScan();
        sts = false;
        if (scan == null)
        {
            form.UpdateStatus("3DScan module not found.");
        }
        else
        {
            result = scan.SetConfiguration(scan_config);
            if (result < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                scan.Dispose();
                // Show the configuration related error code
                switch (result)
                {
                    case pxcmStatus.PXCM_STATUS_FEATURE_UNSUPPORTED:
                        form.UpdateStatus("Configuration not supported.");
                        break;
                    case pxcmStatus.PXCM_STATUS_ITEM_UNAVAILABLE:
                        form.UpdateStatus("Face module not found.");
                        break;
                    default:
                        form.UpdateStatus("SetConfiguration returned an error.");
                        break;
                }
            }
            else
            {
                sts = true;
            }
        }

        // Conditionally finish the initialization and enter the main loop
        if (sts == true)
        {
            // Subscribe to recieve range and tracking alerts
            scan.Subscribe(OnAlert);

            Projection projection = new Projection(
                pp.session, device, dinfo.imageInfo);

            Boolean bScanning = false;
            Boolean bPlaybackStarted = false;
            form.Invoke(new Action(() => form.SetButtonState(sample3dscan.cs.MainForm.ButtonState.Ce_SSd)));

            // Main loop: keep running while a reconstruction is pending or
            // the user has not pressed stop.
            while (form.reconstruct_requested || !form.GetStopState())
            {
                if (form.GetScanRequested()) /* one time latch */
                {
                    form.Invoke(new Action(() => form.SetScanRequested(false)));

                    // Delay recording frames until the start of the scan is requested
                    if (form.IsModeRecord())
                    {
                        pp.captureManager.SetPause(false);
                    }

                    // If the max tri/vert controls are enabled,
                    // use the set values. Otherwise, disabled decimation
                    // by setting the values to zero.
                    /*
                     * scan_config.maxTriangles = form.getMaxTrianglesEnabledChecked()
                     * ? form.getMaxTriangles() : 0;
                     * scan_config.maxVertices = form.getMaxVerticesEnabledChecked()
                     * ? form.getMaxVertices() : 0;
                     */

                    // Request that the scan starts as soon as possible
                    scan_config.startScan = true;
                    scan.SetConfiguration(scan_config);

                    /* Update the status bar to help users understand what the detector is looking for */
                    if (form.IsModeLive())
                    {
                        if (scan_config.mode == PXCM3DScan.ScanningMode.OBJECT_ON_PLANAR_SURFACE_DETECTION)
                        {
                            form.UpdateStatus("Object not detected. Place object on flat surface in center of view.");
                        }
                    }
                }
                else if (form.reconstruct_requested)
                {
                    sts = SaveMesh(scan);
                }

                /* Get preview image from the 3D Scan video module */
                if (!form.GetStopState())
                {
                    /* Wait until a frame is ready: Synchronized or Asynchronous */
                    if (pp.AcquireFrame() < pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        // NOTE(review): projection is disposed here AND again
                        // after the loop exits via this break — confirm
                        // double-dispose is safe for Projection.
                        projection.Dispose();
                        if (!form.IsModeLive())
                        {
                            form.Invoke(new Action(() => form.EndScan()));
                            sts = SaveMesh(scan);
                        }
                        break;
                    }

                    /* Get preview image from the 3D Scan video module */
                    PXCMImage preview_image = scan.AcquirePreviewImage();
                    pp.ReleaseFrame();

                    /* Display Image and Status */
                    if (preview_image != null)
                    {
                        form.SetBitmap(preview_image);

                        if (scan.IsScanning())
                        {
                            statusString = "Scanning";
                            timer.Tick(statusString + " ");

                            if (bScanning == false) // Lazy initializer
                            {
                                bScanning = true; // One way latch

                                // Once the scanning process starts, we want to enable the Reconstruct button
                                form.Invoke(new Action(() => form.SetButtonState(sample3dscan.cs.MainForm.ButtonState.Ce_ESe)));

                                // Object, head and body scanning with a rear facing camera involves walking
                                // around the target, which effectivly exposes the camera to the full
                                // environment, similar to a panorama. To avoid undesirable color
                                // inconsistencies (realted to the response of the auto-exposure/wb changes),
                                // it usually works best to disable them.
                                // Note that these property changes are restored (below).
                                if (device.deviceInfo.orientation == PXCMCapture.DeviceOrientation.DEVICE_ORIENTATION_REAR_FACING &&
                                    scan_config.mode != PXCM3DScan.ScanningMode.FACE &&
                                    form.IsModeLive())
                                {
                                    bAutoExpAndWBChanged = true;
                                    bAutoExposureEnabled = device.QueryColorAutoExposure();
                                    device.SetColorAutoExposure(false);
                                    bAutoWhiteBalanceEnabled = device.QueryColorAutoWhiteBalance();
                                    device.SetColorAutoWhiteBalance(false);
                                }
                            }
                        }
                        else
                        {
                            if (!form.IsModeLive() && !form.IsModeRecord()) // In playback mode, automatically request the scan
                            {
                                if (bPlaybackStarted == false) // Lazy initializer
                                {
                                    bPlaybackStarted = true; // One way latch
                                    form.scan_requested = true;
                                    form.Invoke(new Action(() => form.StartScanning(false)));
                                    form.Invoke(new Action(() => form.SetButtonState(sample3dscan.cs.MainForm.ButtonState.Ce_ESe)));
                                }
                            }
                            else
                            {
                                // Enable the Reconstruct button only once the
                                // scanner reports it is ready.
                                if (!form.GetStopState())
                                {
                                    if (isScanReady(form.landmarksChecked()))
                                    {
                                        form.Invoke(new Action(() => form.EnableReconstruction(true)));
                                    }
                                    else
                                    {
                                        form.Invoke(new Action(() => form.EnableReconstruction(false)));
                                    }
                                }
                            }
                        }
                        preview_image.Dispose();
                    }
                }
            }
            projection.Dispose();
            scan.Dispose();
        }

        // Restore the default camera properties
        if (bAutoExpAndWBChanged)
        {
            device.SetColorAutoExposure(bAutoExposureEnabled);
            device.SetColorAutoWhiteBalance(bAutoWhiteBalanceEnabled);
        }

        device.Dispose();
        device = null;
    }
    else
    {
        try { form.UpdateStatus(result + ""); } catch { }
        sts = false;
    }

    if (sts)
    {
        try { form.UpdateStatus(""); } catch { }
    }

    pp.Close();
    pp.Dispose();

    try { form.Invoke(new Action(() => form.ResetStop())); } catch { }
}
/// <summary>
/// Background worker: plays back every *.rssdk recording found under each
/// directory in <c>dirs</c> and exports per-frame color/depth/IR snapshots
/// as PNG thumbnails via <c>CreateThumbnail</c>. UI text boxes are updated
/// through the WPF dispatcher since this runs off the UI thread.
/// NOTE(review): assumes `dirs`, `sm`, `textBox3..5`, `CreateThumbnail` are
/// members of the enclosing class — confirm against the full file.
/// </summary>
private void ProcessingThread()
{
    string nameColor, nameDepth, nameIr, file, folder;
    int width = 640;
    int height = 480;
    int frameIndex = 0;
    int nframes = 0;
    PXCMImage color;
    PXCMImage depth;
    PXCMImage ir;
    PXCMImage.ImageData imageColor;
    PXCMImage.ImageData imageDepth;
    PXCMImage.ImageData imageIr;
    WriteableBitmap wbm1, wbm2, wbm3;

    foreach (var dir in dirs)
    {
        // Skip directories with no entries at all.
        if (!Directory.EnumerateFileSystemEntries(dir).Any())
        {
            continue;
        }

        foreach (var input_file in Directory.GetFiles(dir, "*.rssdk"))
        {
            // Create a SenseManager instance per recording.
            sm = PXCMSenseManager.CreateInstance();

            // Playback mode (record = false): read the rssdk file and extract
            // frames as fast as possible (non-realtime).
            sm.captureManager.SetFileName(input_file, false);
            sm.captureManager.SetRealtime(false);
            nframes = sm.captureManager.QueryNumberOfFrames();

            // Request the three streams at a fixed 640x480 resolution.
            sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, 0);
            sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, width, height);
            sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, width, height);

            // Show which file is being processed and its frame count.
            Application.Current.Dispatcher.BeginInvoke(new Action(() =>
            {
                textBox3.Text = input_file;
                textBox4.Text = nframes.ToString();
            }));

            // BUG FIX: the Init() status was previously ignored, so a failed
            // initialization still fell through into the acquire loop.
            if (sm.Init() >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // These parts of the output names do not change per frame,
                // so compute them once per recording instead of per frame.
                file = Path.GetFileNameWithoutExtension(input_file);
                folder = Path.GetFileName(Path.GetDirectoryName(input_file));

                while (sm.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    // Retrieve the sample and its three images.
                    PXCMCapture.Sample sample = sm.QuerySample();
                    color = sample.color;
                    depth = sample.depth;
                    ir = sample.ir;
                    frameIndex = sm.captureManager.QueryFrameIndex();

                    color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out imageColor);
                    depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_RAW, out imageDepth);
                    ir.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out imageIr);

                    // Copy the raw planes into WPF bitmaps while access is held.
                    wbm1 = imageColor.ToWritableBitmap(0, color.info.width, color.info.height, 100.0, 100.0);
                    wbm2 = imageDepth.ToWritableBitmap(0, depth.info.width, depth.info.height, 100.0, 100.0);
                    wbm3 = imageIr.ToWritableBitmap(0, ir.info.width, ir.info.height, 100.0, 100.0);

                    // Update the current frame index on the UI.
                    Application.Current.Dispatcher.BeginInvoke(new Action(() =>
                    {
                        textBox5.Text = frameIndex.ToString();
                    }));

                    color.ReleaseAccess(imageColor);
                    depth.ReleaseAccess(imageDepth);
                    ir.ReleaseAccess(imageIr);
                    sm.ReleaseFrame();

                    nameColor = file + "_color_" + frameIndex + ".png";
                    nameDepth = file + "_depth_" + frameIndex + ".png";
                    nameIr = file + "_ir_" + frameIndex + ".png";
                    CreateThumbnail(folder, nameColor, wbm1);
                    CreateThumbnail(folder, nameDepth, wbm2);
                    CreateThumbnail(folder, nameIr, wbm3);
                }
            }
            sm.Dispose();
        }
    }
}
/// <summary>
/// Runs the face-tracking pipeline: configures the capture source
/// (record / playback / live device), enables and configures the face
/// module (detection, landmarks, pose, expressions, recognition, alerts),
/// then streams frames until the form signals stop. Device depth
/// properties are saved, overridden for tracking, and restored on exit.
/// </summary>
public void SimplePipeline()
{
    PXCMSenseManager pp = m_form.Session.CreateSenseManager();
    if (pp == null)
    {
        throw new Exception("PXCMSenseManager null");
    }

    // Set Source & Landmark Profile Index
    PXCMCapture.DeviceInfo info;
    if (m_form.GetRecordState())
    {
        // Record mode: write the stream to file, filtered to the checked device.
        pp.captureManager.SetFileName(m_form.GetFileName(), true);
        if (m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out info))
        {
            pp.captureManager.FilterByDeviceInfo(info);
        }
    }
    else if (m_form.GetPlaybackState())
    {
        // Playback mode: read from file as fast as possible (non-realtime).
        pp.captureManager.SetFileName(m_form.GetFileName(), false);
        PXCMCaptureManager cmanager = pp.QueryCaptureManager();
        if (cmanager == null)
        {
            throw new Exception("PXCMCaptureManager null");
        }
        cmanager.SetRealtime(false);
    }
    else
    {
        // Live mode: filter by the checked device and color resolution.
        if (m_form.Devices.TryGetValue(m_form.GetCheckedDevice(), out info))
        {
            pp.captureManager.FilterByDeviceInfo(info);
            Tuple<PXCMImage.ImageInfo, PXCMRangeF32> selectedRes = m_form.GetCheckedColorResolution();
            var set = new PXCMCapture.Device.StreamProfileSet();
            set.color.frameRate = selectedRes.Item2;
            set.color.imageInfo.format = selectedRes.Item1.format;
            set.color.imageInfo.width = selectedRes.Item1.width;
            set.color.imageInfo.height = selectedRes.Item1.height;
            pp.captureManager.FilterByStreamProfiles(set);
        }
    }

    // Set Module
    pp.EnableFace();
    PXCMFaceModule faceModule = pp.QueryFace();
    if (faceModule == null)
    {
        Debug.Assert(faceModule != null);
        return;
    }

    PXCMFaceConfiguration moduleConfiguration = faceModule.CreateActiveConfiguration();
    if (moduleConfiguration == null)
    {
        Debug.Assert(moduleConfiguration != null);
        return;
    }

    // "3D" in the profile name selects color+depth tracking, otherwise color-only.
    PXCMFaceConfiguration.TrackingModeType mode = m_form.GetCheckedProfile().Contains("3D")
        ? PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH
        : PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR;
    moduleConfiguration.SetTrackingMode(mode);

    moduleConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;
    moduleConfiguration.detection.maxTrackedFaces = m_form.NumDetection;
    moduleConfiguration.landmarks.maxTrackedFaces = m_form.NumLandmarks;
    moduleConfiguration.pose.maxTrackedFaces = m_form.NumPose;

    PXCMFaceConfiguration.ExpressionsConfiguration econfiguration = moduleConfiguration.QueryExpressions();
    if (econfiguration == null)
    {
        throw new Exception("ExpressionsConfiguration null");
    }
    econfiguration.properties.maxTrackedFaces = m_form.NumExpressions;
    // All expressions are marked for tracking; the module itself is only
    // enabled below when the form checkbox asks for it.
    econfiguration.EnableAllExpressions();

    moduleConfiguration.detection.isEnabled = m_form.IsDetectionEnabled();
    moduleConfiguration.landmarks.isEnabled = m_form.IsLandmarksEnabled();
    moduleConfiguration.pose.isEnabled = m_form.IsPoseEnabled();
    if (m_form.IsExpressionsEnabled())
    {
        econfiguration.Enable();
    }

    PXCMFaceConfiguration.RecognitionConfiguration qrecognition = moduleConfiguration.QueryRecognition();
    if (qrecognition == null)
    {
        throw new Exception("PXCMFaceConfiguration.RecognitionConfiguration null");
    }
    if (m_form.IsRecognitionChecked())
    {
        qrecognition.Enable();
    }

    moduleConfiguration.EnableAllAlerts();
    moduleConfiguration.SubscribeAlert(FaceAlertHandler);

    // Configuration must be applied before Init(); both statuses gate streaming.
    pxcmStatus applyChangesStatus = moduleConfiguration.ApplyChanges();
    m_form.UpdateStatus("Init Started", MainForm.Label.StatusLabel);
    if (applyChangesStatus < pxcmStatus.PXCM_STATUS_NO_ERROR || pp.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        m_form.UpdateStatus("Init Failed", MainForm.Label.StatusLabel);
    }
    else
    {
        using (PXCMFaceData moduleOutput = faceModule.CreateOutput())
        {
            Debug.Assert(moduleOutput != null);
            PXCMCapture.Device.StreamProfileSet profiles;
            PXCMCaptureManager cmanager = pp.QueryCaptureManager();
            if (cmanager == null)
            {
                throw new Exception("capture manager null");
            }
            PXCMCapture.Device device = cmanager.QueryDevice();
            if (device == null)
            {
                throw new Exception("device null");
            }

            device.QueryStreamProfileSet(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, out profiles);
            CheckForDepthStream(profiles, faceModule);

            // Save the current depth-related device properties so they can be
            // restored after streaming, then apply tracking-friendly values.
            ushort threshold = device.QueryDepthConfidenceThreshold();
            int filter_option = device.QueryIVCAMFilterOption();
            int range_tradeoff = device.QueryIVCAMMotionRangeTradeOff();
            device.SetDepthConfidenceThreshold(1);
            device.SetIVCAMFilterOption(6);
            device.SetIVCAMMotionRangeTradeOff(21);

            if (m_form.IsMirrored())
            {
                device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL);
            }
            else
            {
                device.SetMirrorMode(PXCMCapture.Device.MirrorMode.MIRROR_MODE_DISABLED);
            }

            m_form.UpdateStatus("Streaming", MainForm.Label.StatusLabel);
            m_timer = new FPSTimer(m_form);

            // Main streaming loop: one synchronized frame per iteration.
            while (!m_form.Stopped)
            {
                if (pp.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }
                bool isConnected = pp.IsConnected();
                DisplayDeviceConnection(isConnected);
                if (isConnected)
                {
                    PXCMCapture.Sample sample = pp.QueryFaceSample();
                    if (sample == null)
                    {
                        // No face sample this frame — release and try again.
                        pp.ReleaseFrame();
                        continue;
                    }
                    DisplayPicture(sample.color);
                    moduleOutput.Update();
                    if (moduleConfiguration.QueryRecognition().properties.isEnabled)
                    {
                        UpdateRecognition(moduleOutput);
                    }
                    m_form.DrawGraphics(moduleOutput);
                    m_form.UpdatePanel();
                }
                pp.ReleaseFrame();
            }

            // Restore the saved device properties.
            device.SetDepthConfidenceThreshold(threshold);
            device.SetIVCAMFilterOption(filter_option);
            device.SetIVCAMMotionRangeTradeOff(range_tradeoff);
        }
        moduleConfiguration.UnsubscribeAlert(FaceAlertHandler);
        moduleConfiguration.ApplyChanges();
        m_form.UpdateStatus("Stopped", MainForm.Label.StatusLabel);
    }
    moduleConfiguration.Dispose();
    pp.Close();
    pp.Dispose();
}
/// <summary>
/// Per-frame driver: pauses/resumes SDK modules based on their reference
/// counters, then (when initialized and at least one module is active)
/// acquires a frame and refreshes every enabled output (color/depth/IR
/// images, point cloud, UV map, segmentation, face/hand/blob data, speech).
/// Images are AddRef'd so they outlive ReleaseFrame; the previous frame's
/// image is disposed before being replaced.
/// </summary>
void Update()
{
    //Dynamically Pause/Enable Modules
    int numberOfEnabledModules = 0;
    foreach (var option in _senseOptions)
    {
        if (option.RefCounter == 0 && option.Enabled)
        {
            // No more consumers — pause the module (CUID 0 means no module).
            if (option.ModuleCUID > 0)
            {
                SenseManager.PauseModule(option.ModuleCUID, true);
            }
            option.Enabled = false;
        }
        else if (option.RefCounter > 0 && !option.Enabled)
        {
            // A consumer appeared; re-run the init cycle if the option was
            // never initialized, then unpause its module.
            if (!option.Initialized)
            {
                OnDisable();
                OnEnable();
                Start();
            }
            if (option.ModuleCUID > 0)
            {
                SenseManager.PauseModule(option.ModuleCUID, false);
            }
            option.Enabled = true;
        }
        if (option.Enabled)
        {
            numberOfEnabledModules++;
        }
    }

    //Update Speech commands if changed
    if (_speechCommandsChanged)
    {
        UpdateSpeechCommands();
        SpeechManager.Reset();
    }

    // Every frame update all the data
    if (Initialized && numberOfEnabledModules > 0)
    {
        // Non-blocking acquire (timeout 0); skip this frame on failure.
        _sts = SenseManager.AcquireFrame(true, 0);
        if (_sts == pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoColorStream).Enabled)
            {
                // Drop last frame's color image before taking the new one.
                if (ImageRgbOutput != null)
                {
                    ImageRgbOutput.Dispose();
                }
                // _captureSample is queried lazily once per frame and shared
                // by the color/depth/IR branches below.
                if (_captureSample == null)
                {
                    _captureSample = SenseManager.QuerySample();
                }
                if (_captureSample.color != null)
                {
                    ImageRgbOutput = _captureSample.color;
                    // Keep the image alive past ReleaseFrame().
                    ImageRgbOutput.AddRef();
                }
            }
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoDepthStream).Enabled ||
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.PointCloud).Enabled)
            {
                if (ImageDepthOutput != null)
                {
                    ImageDepthOutput.Dispose();
                }
                if (_captureSample == null)
                {
                    _captureSample = SenseManager.QuerySample();
                }
                if (_captureSample.depth != null)
                {
                    ImageDepthOutput = _captureSample.depth;
                    ImageDepthOutput.AddRef();
                    /* GZ
                     * if (!_isInitBlob)
                     * {
                     *     PXCMImage.ImageInfo info = ImageDepthOutput.QueryInfo();
                     *     BlobExtractor.Init(info);
                     *     BlobExtractor.SetMaxBlobs(MaxBlobsToDetect);
                     *     _isInitBlob = true;
                     * }
                     */
                    // Lazily allocate the point-cloud buffer (one point per depth pixel).
                    if (PointCloud == null)
                    {
                        PointCloud = new PXCMPoint3DF32[ImageDepthOutput.info.width * ImageDepthOutput.info.height];
                    }
                    if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.PointCloud).Enabled)
                    {
                        if (PointCloud == null)
                        {
                            PointCloud = new PXCMPoint3DF32[ImageDepthOutput.info.width * ImageDepthOutput.info.height];
                        }
                        _sts = Projection.QueryVertices(ImageDepthOutput, PointCloud);
                    }
                    if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.UVMap).Enabled)
                    {
                        if (UvMap == null)
                        {
                            UvMap = new PXCMPointF32[ImageDepthOutput.info.width * ImageDepthOutput.info.height];
                        }
                        Projection.QueryUVMap(ImageDepthOutput, UvMap);
                    }
                }
            }
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoIRStream).Enabled)
            {
                if (ImageIROutput != null)
                {
                    ImageIROutput.Dispose();
                }
                if (_captureSample == null)
                {
                    _captureSample = SenseManager.QuerySample();
                }
                if (_captureSample.ir != null)
                {
                    ImageIROutput = _captureSample.ir;
                    ImageIROutput.AddRef();
                }
            }
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoSegmentation).Enabled)
            {
                if (Image3DSegmentationOutput != null)
                {
                    Image3DSegmentationOutput.Dispose();
                }
                PXCM3DSeg seg = SenseManager.Query3DSeg();
                if (seg != null)
                {
                    Image3DSegmentationOutput = seg.AcquireSegmentedImage();
                }
            }
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Face).Enabled)
            {
                FaceModuleOutput.Update();
            }
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Hand).Enabled)
            {
                HandDataOutput.Update();
            }
            if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Blob).Enabled)
            {
                BlobDataOutput.Update();
            }

            // Invalidate the cached sample so the next frame re-queries it.
            _captureSample = null;
            SenseManager.ReleaseFrame();
        }

        //Speech
        if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Speech).Enabled)
        {
            SpeechManager.QueryRecognizedCommands(out SpeechOutput);
        }
    }
}
/// <summary>
/// Streams color/depth/IR frames synchronously or asynchronously, rendering
/// each requested panel through the RenderFrame event until Stop is set.
/// Supports optional recording/playback, device and stream-profile filtering,
/// and live mirror-mode toggling. All failures are reported via SetStatus.
/// </summary>
public void StreamColorDepth() /* Stream Color and Depth Synchronously or Asynchronously */
{
    try
    {
        bool sts = true;

        /* Create an instance of the PXCMSenseManager interface */
        PXCMSenseManager sm = PXCMSenseManager.CreateInstance();
        if (sm == null)
        {
            SetStatus("Failed to create an SDK pipeline object");
            return;
        }

        // BUG FIX: previously sm.Dispose() was only reached on the normal
        // path; an exception thrown during setup or streaming leaked the
        // SenseManager into the outer catch. The finally guarantees disposal.
        try
        {
            /* Optional: if playback or recording */
            if ((Playback || Record) && File != null)
            {
                sm.captureManager.SetFileName(File, Record);
            }

            /* Optional: Set Input Source */
            if (!Playback && DeviceInfo != null)
            {
                sm.captureManager.FilterByDeviceInfo(DeviceInfo);
            }

            /* Set Color & Depth Resolution and enable streams */
            if (StreamProfileSet != null)
            {
                /* Optional: Filter the data based on the request */
                sm.captureManager.FilterByStreamProfiles(StreamProfileSet);

                /* Enable raw data streaming for specific stream types */
                for (int s = 0; s < PXCMCapture.STREAM_LIMIT; s++)
                {
                    PXCMCapture.StreamType st = PXCMCapture.StreamTypeFromIndex(s);
                    PXCMCapture.Device.StreamProfile info = StreamProfileSet[st];
                    if (info.imageInfo.format == 0)
                    {
                        continue; // stream not requested
                    }
                    /* For a simple request you can also use sm.EnableStream(...) */
                    PXCMVideoModule.DataDesc desc = new PXCMVideoModule.DataDesc();
                    desc.streams[st].frameRate.min = desc.streams[st].frameRate.max = info.frameRate.max;
                    desc.streams[st].sizeMin.height = desc.streams[st].sizeMax.height = info.imageInfo.height;
                    desc.streams[st].sizeMin.width = desc.streams[st].sizeMax.width = info.imageInfo.width;
                    desc.streams[st].options = info.options;
                    sm.EnableStreams(desc);
                }
            }

            /* Initialization */
            Timer timer = new Timer();
            timer.UpdateStatus += UpdateStatus;
            SetStatus("Init Started");
            if (sm.Init() >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                /* Reset all properties */
                sm.captureManager.device.ResetProperties(PXCMCapture.StreamType.STREAM_TYPE_ANY);

                /* Set mirror mode */
                PXCMCapture.Device.MirrorMode mirror = Mirror
                    ? PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL
                    : PXCMCapture.Device.MirrorMode.MIRROR_MODE_DISABLED;
                sm.captureManager.device.SetMirrorMode(mirror);

                SetStatus("Streaming");
                while (!Stop)
                {
                    /* Wait until a frame is ready: Synchronized or Asynchronous */
                    if (sm.AcquireFrame(Synced).IsError())
                    {
                        break;
                    }

                    /* Display images */
                    PXCMCapture.Sample sample = sm.QuerySample();

                    /* Render streams; 'render' is snapshotted once for thread safety */
                    EventHandler<RenderFrameEventArgs> render = RenderFrame;
                    PXCMImage image = null;
                    if (ColorPanel != PXCMCapture.StreamType.STREAM_TYPE_ANY && render != null)
                    {
                        image = sample[ColorPanel];
                        render(this, new RenderFrameEventArgs(0, image));
                    }
                    if (DepthPanel != PXCMCapture.StreamType.STREAM_TYPE_ANY && render != null)
                    {
                        render(this, new RenderFrameEventArgs(1, sample[DepthPanel]));
                    }
                    if (IRPanel != PXCMCapture.StreamType.STREAM_TYPE_ANY && render != null)
                    {
                        render(this, new RenderFrameEventArgs(2, sample[IRPanel]));
                    }
                    if (IRLeftPanel != PXCMCapture.StreamType.STREAM_TYPE_ANY && render != null)
                    {
                        render(this, new RenderFrameEventArgs(3, sample[IRLeftPanel]));
                    }
                    if (IRRightPanel != PXCMCapture.StreamType.STREAM_TYPE_ANY && render != null)
                    {
                        render(this, new RenderFrameEventArgs(4, sample[IRRightPanel]));
                    }

                    /* Optional: pick up a changed Mirror setting mid-stream */
                    mirror = Mirror
                        ? PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL
                        : PXCMCapture.Device.MirrorMode.MIRROR_MODE_DISABLED;
                    if (mirror != sm.captureManager.device.QueryMirrorMode())
                    {
                        sm.captureManager.device.SetMirrorMode(mirror);
                    }

                    /* Optional: Show performance tick */
                    if (image != null)
                    {
                        timer.Tick(PXCMImage.PixelFormatToString(image.info.format) + " " + image.info.width + "x" + image.info.height);
                    }
                    sm.ReleaseFrame();
                }
            }
            else
            {
                SetStatus("Init Failed");
                sts = false;
            }
        }
        finally
        {
            sm.Dispose();
        }

        if (sts)
        {
            SetStatus("Stopped");
        }
    }
    catch (Exception e)
    {
        SetStatus(e.GetType().ToString());
    }
}
/// <summary>
/// Streaming loop for background segmentation: for every frame, replaces
/// fully transparent pixels of the segmented image with pixels from the
/// selected backdrop bitmap, pushes the composited image to the UI, and
/// optionally saves a snapshot. Runs until AcquireFrame reports an error.
/// NOTE(review): the inner loop advances the segmented pointer by a fixed
/// 4 bytes (RGB32) while 'bytesPerPixel' comes from the backdrop format —
/// the two only line up when the backdrop is also 32bpp; confirm upstream.
/// </summary>
private void AcquireThread()
{
    // Stream data
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        if (click == true)
        {
            // Brief pause after a click before resuming processing.
            Thread.Sleep(500);
            click = false;
        }

        // Retrieve the results
        PXCM3DSeg segmentation = senseManager.Query3DSeg();
        if (segmentation != null)
        {
            // Get the segmented image
            PXCMImage segmentedImage = segmentation.AcquireSegmentedImage();
            if (segmentedImage != null)
            {
                // Access the segmented image data
                PXCMImage.ImageData segmentedImageData;
                segmentedImage.AcquireAccess(PXCMImage.Access.ACCESS_READ,
                    PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out segmentedImageData);

                // Lock the backdrop image bitmap bits into system memory and access its data
                // (Reference: https://msdn.microsoft.com/en-us/library/5ey6h79d%28v=vs.110%29.aspx
                // (Reference: http://csharpexamples.com/fast-image-processing-c/)
                Rectangle imageRect = new Rectangle(0, 0, WIDTH, HEIGHT);
                BitmapData backdropBitmapData = backdrop.LockBits(imageRect, ImageLockMode.ReadWrite, backdrop.PixelFormat);
                int bytesPerPixel = Bitmap.GetPixelFormatSize(backdropBitmapData.PixelFormat) / 8;
                int widthInBytes = WIDTH * bytesPerPixel;

                for (int h = 0; h < HEIGHT; h++)
                {
                    // Use unsafe keyword to work with pointers for faster image processing
                    // (Required setting: Project -> Properties -> Build -> Allow unsafe code)
                    unsafe
                    {
                        byte* segmentedImagePixel = (byte*)segmentedImageData.planes[0] + h * segmentedImageData.pitches[0];
                        // PERF FIX: the backdrop row pointer depends only on the row,
                        // so compute it once per row instead of once per pixel
                        // (it was previously recalculated inside the inner loop).
                        byte* backdropPixel = (byte*)backdropBitmapData.Scan0 + (h * backdropBitmapData.Stride);
                        for (int w = 0; w < widthInBytes; w = w + bytesPerPixel)
                        {
                            // Substitute segmented background pixels (alpha channel of zero)
                            // with pixels from the selected backdrop image.
                            // ('<= 0' tightened to '== 0': the alpha byte is unsigned.)
                            if (segmentedImagePixel[3] == 0)
                            {
                                segmentedImagePixel[0] = backdropPixel[w];
                                segmentedImagePixel[1] = backdropPixel[w + 1];
                                segmentedImagePixel[2] = backdropPixel[w + 2];
                            }
                            segmentedImagePixel += 4;
                        }
                    }
                }

                // Unlock the backdrop image bitmap bits
                backdrop.UnlockBits(backdropBitmapData);

                // Export the image data to a bitmap
                Bitmap bitmap = segmentedImageData.ToBitmap(0, segmentedImage.info.width, segmentedImage.info.height);

                // Update the UI by delegating work to the Dispatcher associated with the UI thread
                this.Dispatcher.Invoke(System.Windows.Threading.DispatcherPriority.Normal,
                    new Action(delegate()
                    {
                        imgBackdrop.Source = ImageUtils.ConvertBitmapToWpf(bitmap);
                    }));

                // Optionally save a snapshot (captureSnapshot is set in the Capture button's handler)
                if (captureSnapshot)
                {
                    bitmap.Save(path + "MyPic.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                    captureSnapshot = false;
                }

                segmentedImage.ReleaseAccess(segmentedImageData);
                segmentedImage.Dispose();
                bitmap.Dispose();
            }
            // BUG FIX: the segmentation module was previously disposed only
            // when a segmented image was returned, leaking it otherwise.
            segmentation.Dispose();
        }

        // Resume next frame processing
        senseManager.ReleaseFrame();
    }
}