private void Render(Bitmap bitmap, MyTrackedPerson myTrackedPerson)
{
    Dispatcher.Invoke(System.Windows.Threading.DispatcherPriority.Normal,
        new Action(delegate()
        {
            // Update the bitmap image
            BitmapImage bitmapImage = ConvertBitmap(bitmap);

            if (bitmapImage != null)
            {
                imgStream.Source = bitmapImage;
            }

            // Update the data labels
            lblFacesDetected.Content = string.Format("Faces Detected: {0}", myTrackedPerson.FacesDetected);
            lblFaceH.Content = string.Format("Face Rect H: {0}", myTrackedPerson.FaceH);
            lblFaceW.Content = string.Format("Face Rect W: {0}", myTrackedPerson.FaceW);
            lblFaceX.Content = string.Format("Face Coord X: {0}", myTrackedPerson.FaceX);
            lblFaceY.Content = string.Format("Face Coord Y: {0}", myTrackedPerson.FaceY);
            lblFaceDepth.Content = string.Format("Face Depth: {0}", myTrackedPerson.FaceDepth);
            lblNumberPersons.Content = string.Format("Persons Detected: {0}", myTrackedPerson.PersonsDetected);
            lblPersonH.Content = string.Format("Person Rect H: {0}", myTrackedPerson.H);
            lblPersonW.Content = string.Format("Person Rect W: {0}", myTrackedPerson.W);
            lblPersonX.Content = string.Format("Person Coord X: {0}", myTrackedPerson.X);
            lblPersonY.Content = string.Format("Person Coord Y: {0}", myTrackedPerson.Y);

            // Show or hide the markers
            if (chkShowMarkers.IsChecked == true)
            {
                if (myTrackedPerson.FacesDetected == 1)
                {
                    rectFaceMarker.Height = myTrackedPerson.FaceH;
                    rectFaceMarker.Width = myTrackedPerson.FaceW;
                    Canvas.SetLeft(rectFaceMarker, myTrackedPerson.FaceX);
                    Canvas.SetTop(rectFaceMarker, myTrackedPerson.FaceY);
                    rectFaceMarker.Visibility = Visibility.Visible;
                }
                else
                {
                    rectFaceMarker.Visibility = Visibility.Hidden;
                }

                if (myTrackedPerson.PersonsDetected == 1)
                {
                    rectPersonMarker.Height = myTrackedPerson.H;
                    rectPersonMarker.Width = myTrackedPerson.W;
                    Canvas.SetLeft(rectPersonMarker, myTrackedPerson.X);
                    Canvas.SetTop(rectPersonMarker, myTrackedPerson.Y);
                    rectPersonMarker.Visibility = Visibility.Visible;
                }
                else
                {
                    rectPersonMarker.Visibility = Visibility.Hidden;
                }
            }
            else
            {
                rectFaceMarker.Visibility = Visibility.Hidden;
                rectPersonMarker.Visibility = Visibility.Hidden;
            }
        }));
}
private void Update()
{
    // Start the AcquireFrame-ReleaseFrame loop
    while (sm.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        // Acquire color image data
        PXCMCapture.Sample sample = sm.QuerySample();
        Bitmap colorBitmap;
        PXCMImage.ImageData colorData;
        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out colorData);
        colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

        // Create an instance of MyTrackedPerson
        MyTrackedPerson myTrackedPerson = new MyTrackedPerson();

        // Acquire person tracking data
        personData = personModule.QueryOutput();
        myTrackedPerson.PersonsDetected = personData.QueryNumberOfPeople();

        if (myTrackedPerson.PersonsDetected == 1)
        {
            // Query the tracked person's 2D bounding box
            PXCMPersonTrackingData.Person trackedPerson = personData.QueryPersonData(PXCMPersonTrackingData.AccessOrderType.ACCESS_ORDER_BY_ID, 0);
            PXCMPersonTrackingData.PersonTracking trackedPersonData = trackedPerson.QueryTracking();
            PXCMPersonTrackingData.BoundingBox2D personBox = trackedPersonData.Query2DBoundingBox();
            myTrackedPerson.X = personBox.rect.x;
            myTrackedPerson.Y = personBox.rect.y;
            myTrackedPerson.H = personBox.rect.h;
            myTrackedPerson.W = personBox.rect.w;

            // Acquire face tracking data
            faceData.Update();
            myTrackedPerson.FacesDetected = faceData.QueryNumberOfDetectedFaces();

            if (myTrackedPerson.FacesDetected == 1)
            {
                PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
                PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                PXCMRectI32 faceRectangle;
                faceDetectionData.QueryBoundingRect(out faceRectangle);
                myTrackedPerson.FaceH = faceRectangle.h;
                myTrackedPerson.FaceW = faceRectangle.w;
                myTrackedPerson.FaceX = faceRectangle.x;
                myTrackedPerson.FaceY = faceRectangle.y;
                float faceDepth;
                faceDetectionData.QueryFaceAverageDepth(out faceDepth);
                myTrackedPerson.FaceDepth = faceDepth;

                // Save a snapshot of the color frame, roughly one image every 10 seconds
                if (doit == true)
                {
                    colorBitmap.Save("myBitmap" + imgID + ".bmp");
                    doit = false;
                    stopwatch.Start();
                }
                else if (stopwatch.Elapsed.Seconds > 10)
                {
                    imgID++;
                    doit = true;
                    stopwatch.Reset();
                }
            }

            //IRobotCreate.SetDrive(40, 40);
            //my.servoNeck.setTargetPosition(1500);
            my.servos.move(Behaviour.neck, 100); // changed for the new servo class

            if ((0 < myTrackedPerson.X) && (myTrackedPerson.X <= 120))
            {
                // Person's bounding box is in the low X band: pan the head and rotate in place
                //my.servoShoulder.setTargetPosition(1300);
                my.servos.move(Behaviour.neck, 90);
                IRobotCreate.SetDrive(20, -20);
            }
            else if ((120 < myTrackedPerson.X) && (myTrackedPerson.X < 310))
            {
                // Person is roughly centered: follow the person based on face depth
                //my.servoShoulder.setTargetPosition(1500);
                my.servos.move(Behaviour.head, 100); // changed for the new servo class

                // Round the depth (in mm) down to the nearest 100
                float depth = myTrackedPerson.FaceDepth - (int)myTrackedPerson.FaceDepth % 100;

                if (myTrackedPerson.FacesDetected == 1 && depth < 1750 && depth > 1400)
                {
                    IRobotCreate.SetDrive(0, 0);       // within the target range: stop
                }
                else if (myTrackedPerson.FacesDetected == 1 && depth < 1400)
                {
                    IRobotCreate.SetDrive(-100, -100); // closer than ~1.4 m: back away
                }
                else if (myTrackedPerson.FacesDetected == 1 && depth > 1750)
                {
                    IRobotCreate.SetDrive(100, 100);   // farther than ~1.75 m: close the distance
                }
            }
            else if (310 <= myTrackedPerson.X)
            {
                // Person's bounding box is in the high X band: pan the head and rotate the other way
                //my.servoShoulder.setTargetPosition(1700);
                my.servos.move(Behaviour.head, 120); // changed for the new servo class
                IRobotCreate.SetDrive(-20, 20);
            }
        }

        //my.servoNeck.SetSpeed(40);
        //my.servoShoulder.SetSpeed(40);
        my.servos.setSpeed(Behaviour.neck, 100); // changed for the new servo class
        my.servos.setSpeed(Behaviour.head, 100); // changed for the new servo class

        // Update the UI
        Render(colorBitmap, myTrackedPerson);

        // Release resources
        colorBitmap.Dispose();
        sample.color.ReleaseAccess(colorData);
        sm.ReleaseFrame();
    }
}
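For reference, the two methods above pass their tracking results around in a MyTrackedPerson object. If that class is not already defined elsewhere in the project, a minimal sketch consistent with the properties used here could look like the following; the field types are assumptions inferred from the PXCMRectI32 bounding boxes and the float depth value.

class MyTrackedPerson
{
    // Person tracking results: count and 2D bounding box
    public int PersonsDetected { get; set; }
    public int X { get; set; }
    public int Y { get; set; }
    public int W { get; set; }
    public int H { get; set; }

    // Face tracking results: count, 2D bounding box, and average depth (mm)
    public int FacesDetected { get; set; }
    public int FaceX { get; set; }
    public int FaceY { get; set; }
    public int FaceW { get; set; }
    public int FaceH { get; set; }
    public float FaceDepth { get; set; }
}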