Code example #1
File: MainWindow.xaml.cs    Project: aanipuna/censarv2
        private void Update()
        {
            // Start AcquireFrame-ReleaseFrame loop
            while (sm.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // Acquire color image data
                PXCMCapture.Sample  sample = sm.QuerySample();
                Bitmap              colorBitmap;
                PXCMImage.ImageData colorData;
                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out colorData);
                colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                // Create an instance of MyTrackedPerson
                MyTrackedPerson myTrackedPerson = new MyTrackedPerson();

                // Acquire person tracking data
                personData = personModule.QueryOutput();
                myTrackedPerson.PersonsDetected = personData.QueryNumberOfPeople();

                if (myTrackedPerson.PersonsDetected == 1)
                {
                    // person track data
                    PXCMPersonTrackingData.Person         trackedPerson     = personData.QueryPersonData(PXCMPersonTrackingData.AccessOrderType.ACCESS_ORDER_BY_ID, 0);
                    PXCMPersonTrackingData.PersonTracking trackedPersonData = trackedPerson.QueryTracking();
                    PXCMPersonTrackingData.BoundingBox2D  personBox         = trackedPersonData.Query2DBoundingBox();
                    myTrackedPerson.X = personBox.rect.x;
                    myTrackedPerson.Y = personBox.rect.y;
                    myTrackedPerson.H = personBox.rect.h;
                    myTrackedPerson.W = personBox.rect.w;

                    // Acquire face tracking data
                    faceData.Update();
                    myTrackedPerson.FacesDetected = faceData.QueryNumberOfDetectedFaces();

                    if (myTrackedPerson.FacesDetected == 1)
                    {
                        PXCMFaceData.Face          face = faceData.QueryFaceByIndex(0);
                        PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                        PXCMRectI32 faceRectangle;
                        faceDetectionData.QueryBoundingRect(out faceRectangle);
                        myTrackedPerson.FaceH = faceRectangle.h;
                        myTrackedPerson.FaceW = faceRectangle.w;
                        myTrackedPerson.FaceX = faceRectangle.x;
                        myTrackedPerson.FaceY = faceRectangle.y;
                        float faceDepth;
                        faceDetectionData.QueryFaceAverageDepth(out faceDepth);
                        myTrackedPerson.FaceDepth = faceDepth;

                        // Save a snapshot of the color frame, then wait 10 seconds
                        // before arming the next capture
                        if (doit)
                        {
                            colorBitmap.Save("myBitmap" + imgID + ".bmp");
                            doit = false;
                            stopwatch.Start();
                        }
                        else if (stopwatch.Elapsed.TotalSeconds > 10)
                        {
                            imgID++;
                            doit = true;
                            stopwatch.Reset();
                        }
                    }

                    //IRobotCreate.SetDrive(40, 40);
                    //my.servoNeck.setTargetPosition(1500);
                    my.servos.move(Behaviour.neck, 100); // changed for the new servo class

                    // Steer toward the tracked person based on the bounding box X position
                    if (0 < myTrackedPerson.X && myTrackedPerson.X <= 120)
                    {
                        // Person near the left edge of the frame: pan the neck servo and rotate in place
                        //my.servoShoulder.setTargetPosition(1300);
                        my.servos.move(Behaviour.neck, 90);
                        IRobotCreate.SetDrive(20, -20);
                    }
                    else if (120 < myTrackedPerson.X && myTrackedPerson.X < 310)
                    {
                        // Person roughly centered: follow up based on face depth
                        //my.servoShoulder.setTargetPosition(1500);
                        my.servos.move(Behaviour.head, 100); // changed for the new servo class
                        // Round the average face depth (mm) down to the nearest 100
                        float depth = myTrackedPerson.FaceDepth - (int)myTrackedPerson.FaceDepth % 100;
                        if (myTrackedPerson.FacesDetected == 1 && depth < 1750 && depth > 1400)
                        {
                            // Comfortable distance: stop
                            IRobotCreate.SetDrive(0, 0);
                        }
                        else if (myTrackedPerson.FacesDetected == 1 && depth < 1400)
                        {
                            // Too close: back up
                            IRobotCreate.SetDrive(-100, -100);
                        }
                        else if (myTrackedPerson.FacesDetected == 1 && depth > 1750)
                        {
                            // Too far: drive forward
                            IRobotCreate.SetDrive(100, 100);
                        }
                    }
                    else if (310 <= myTrackedPerson.X)
                    {
                        // Person near the right edge of the frame: pan the head servo and rotate the other way
                        //my.servoShoulder.setTargetPosition(1700);
                        my.servos.move(Behaviour.head, 120); // changed for the new servo class
                        IRobotCreate.SetDrive(-20, 20);
                    }
                }
                //my.servoNeck.SetSpeed(40);
                // my.servoShoulder.SetSpeed(40);
                my.servos.setSpeed(Behaviour.neck, 100); // changed for the new servo class
                my.servos.setSpeed(Behaviour.head, 100); // changed for the new servo class

                // Update UI
                Render(colorBitmap, myTrackedPerson);

                // Release resources
                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                sm.ReleaseFrame();
            }
        }
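
This example populates a MyTrackedPerson container that is defined elsewhere in the project and not shown on this page; sm (the PXCMSenseManager), personModule, personData, faceData, my, IRobotCreate, doit, imgID, and stopwatch are likewise fields initialized outside Update(). As a reading aid, here is a minimal, hypothetical sketch of what MyTrackedPerson could look like, reconstructed only from the members the example touches; the actual class in aanipuna/censarv2 may differ.

    // Hypothetical reconstruction; only the members referenced above are included.
    public class MyTrackedPerson
    {
        // Person tracking results (PXCMPersonTrackingData)
        public int PersonsDetected { get; set; }
        public int X { get; set; }   // 2D bounding box of the tracked person
        public int Y { get; set; }
        public int W { get; set; }
        public int H { get; set; }

        // Face tracking results (PXCMFaceData)
        public int FacesDetected { get; set; }
        public int FaceX { get; set; }   // face bounding rectangle
        public int FaceY { get; set; }
        public int FaceW { get; set; }
        public int FaceH { get; set; }
        public float FaceDepth { get; set; }   // average face depth reported by the SDK (mm)
    }
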
Code example #2
        private void Update()
        {
            // Start AcquireFrame-ReleaseFrame loop
            while (sm.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // Acquire color image data
                PXCMCapture.Sample  sample = sm.QuerySample();
                Bitmap              colorBitmap;
                PXCMImage.ImageData colorData;
                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out colorData);
                colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                // Create an instance of MyTrackedPerson
                MyTrackedPerson myTrackedPerson = new MyTrackedPerson();
                MyBlobs         myBlobs         = new MyBlobs();

                // Acquire person tracking data
                personData = personModule.QueryOutput();
                myTrackedPerson.PersonsDetected = personData.QueryNumberOfPeople();

                if (myTrackedPerson.PersonsDetected == 1)
                {
                    PXCMPersonTrackingData.Person         trackedPerson     = personData.QueryPersonData(PXCMPersonTrackingData.AccessOrderType.ACCESS_ORDER_BY_ID, 0);
                    PXCMPersonTrackingData.PersonTracking trackedPersonData = trackedPerson.QueryTracking();
                    PXCMPersonTrackingData.BoundingBox2D  personBox         = trackedPersonData.Query2DBoundingBox();
                    myTrackedPerson.X = personBox.rect.x;
                    myTrackedPerson.Y = personBox.rect.y;
                    myTrackedPerson.H = personBox.rect.h;
                    myTrackedPerson.W = personBox.rect.w;

                    /*
                     * PXCMPersonTrackingData.PersonJoints personJoints = trackedPerson.QuerySkeletonJoints();
                     * PXCMPersonTrackingData.PersonJoints.SkeletonPoint[] skeletonPoints = new PXCMPersonTrackingData.PersonJoints.SkeletonPoint[personJoints.QueryNumJoints()];
                     * trackedPerson.QuerySkeletonJoints().QueryJoints(skeletonPoints);
                     * if (skeletonPoints.Length > 0)
                     *  skeletonPoints[0].GetType();
                     */
                }

                // Acquire face tracking data
                faceData.Update();
                myTrackedPerson.FacesDetected = faceData.QueryNumberOfDetectedFaces();

                if (myTrackedPerson.FacesDetected == 1)
                {
                    PXCMFaceData.Face          face = faceData.QueryFaceByIndex(0);
                    PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                    PXCMRectI32 faceRectangle;
                    faceDetectionData.QueryBoundingRect(out faceRectangle);
                    myTrackedPerson.FaceH = faceRectangle.h;
                    myTrackedPerson.FaceW = faceRectangle.w;
                    myTrackedPerson.FaceX = faceRectangle.x;
                    myTrackedPerson.FaceY = faceRectangle.y;
                    float faceDepth;
                    faceDetectionData.QueryFaceAverageDepth(out faceDepth);
                    myTrackedPerson.FaceDepth = faceDepth;
                }

                blobData.Update();
                int numBlobs = blobData.QueryNumberOfBlobs();
                myBlobs.numBlobs      = numBlobs;
                myBlobs.blobs         = new List <List <PXCMPointI32> >(numBlobs);
                myBlobs.closestPoints = new List <PXCMPoint3DF32>(4);
                for (int i = 0; i < numBlobs; i++)
                {
                    PXCMBlobData.IBlob blob;
                    pxcmStatus         result1 = blobData.QueryBlob(i, PXCMBlobData.SegmentationImageType.SEGMENTATION_IMAGE_DEPTH, PXCMBlobData.AccessOrderType.ACCESS_ORDER_NEAR_TO_FAR, out blob);
                    if (result1 == pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        PXCMPoint3DF32 closestPoint = blob.QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CLOSEST);
                        myBlobs.closestPoints.Add(closestPoint);

                        int numContours = blob.QueryNumberOfContours();
                        if (numContours > 0)
                        {
                            // iterate the contours, keeping only the outer contour's points
                            for (int j = 0; j < numContours; j++)
                            {
                                PXCMBlobData.IContour contour;
                                pxcmStatus            result2 = blob.QueryContour(j, out contour);
                                if (result2 == pxcmStatus.PXCM_STATUS_NO_ERROR)
                                {
                                    if (contour.IsOuter())
                                    {
                                        PXCMPointI32[] points;
                                        pxcmStatus     result3 = contour.QueryPoints(out points);
                                        if (result3 == pxcmStatus.PXCM_STATUS_NO_ERROR)
                                        {
                                            myBlobs.blobs.Add(points.ToList <PXCMPointI32>());
                                        }
                                    }
                                }
                            }
                        }
                    }
                }

                // Update UI
                Render(colorBitmap, myTrackedPerson, myBlobs);

                // Serialize to JSON and send to all connected clients

                var personJson = JsonConvert.SerializeObject(myTrackedPerson);
                personSockets.ToList().ForEach(s => s.Send(personJson));

                var blobJson = JsonConvert.SerializeObject(myBlobs);
                blobSockets.ToList().ForEach(s => s.Send(blobJson));

                // deserialize json as follows
                //MyTrackedPerson deserializedProduct = JsonConvert.DeserializeObject<MyTrackedPerson>(json);

                // Release resources
                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                sm.ReleaseFrame();
            }
        }
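
In addition to rendering locally, code example #2 pushes the tracking results to WebSocket clients: the person and blob data are serialized with Json.NET (JsonConvert) and sent to every connection in personSockets and blobSockets, socket collections maintained outside Update(). The MyBlobs container is also defined elsewhere in the project; a minimal, hypothetical sketch consistent with how it is used above could look like the following, together with the client-side deserialization the closing comment hints at (class shape inferred from the example, not taken from the project).

    using System.Collections.Generic;

    // Hypothetical reconstruction of the MyBlobs holder used above.
    public class MyBlobs
    {
        public int numBlobs;                        // QueryNumberOfBlobs()
        public List<List<PXCMPointI32>> blobs;      // outer-contour points, one list per blob
        public List<PXCMPoint3DF32> closestPoints;  // EXTREMITY_CLOSEST point of each blob
    }

    // A receiving client can turn the JSON back into objects with Json.NET,
    // mirroring the commented-out line at the end of the example:
    //   MyTrackedPerson person = JsonConvert.DeserializeObject<MyTrackedPerson>(personJson);
    //   MyBlobs blobs = JsonConvert.DeserializeObject<MyBlobs>(blobJson);
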