Example No. 1
        public void updateData(PXCMFaceData.Face face)
        {
            if (face == null)
            {
                return;
            }
            PXCMFaceData.LandmarksData ldata = face.QueryLandmarks();
            if (ldata == null)
            {
                return;
            }

            PXCMRectI32 rect;

            // Detection data can be null when the face module is not tracking; guard before use
            PXCMFaceData.DetectionData detection = face.QueryDetection();
            if (detection == null)
            {
                return;
            }
            detection.QueryBoundingRect(out rect);

            PXCMFaceData.LandmarkPoint[] points;
            ldata.QueryPoints(out points);
            if (points == null || points.Length == 0)
            {
                return;
            }

            // Clamp to the number of points returned; the face module reports up to 78 landmarks
            for (int i = 0; i < points.Length && i < 78; i++)
            {
                landmarksData[i].x = points[i].image.x - rect.x;
                landmarksData[i].y = points[i].image.y - rect.y;
                landmarksData[i].z = points[i].world.z;
            }
        }
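The loop above fills a landmarksData buffer that the snippet does not declare. A minimal sketch of what that backing field might look like, assuming a plain struct with x/y/z members sized for the 78 landmark points the face module reports:

        // Hypothetical backing field for updateData above; the real class
        // is not shown, so the struct name and layout are assumptions.
        private struct LandmarkEntry
        {
            public float x, y, z;
        }

        private LandmarkEntry[] landmarksData = new LandmarkEntry[78];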
Example No. 2
        private void recordDataThread(Object obj)
        {
            Thread.Sleep(1000);
            int framecount = 1;

            while (true)
            {
                Thread.Sleep(34);
                if (data.Count > 0)
                {
                    // Look up this frame's data; skip the iteration if it has not arrived yet
                    PXCMFaceData mapedValue_temp;
                    if (!data.TryGetValue(framecount, out mapedValue_temp))
                    {
                        continue;
                    }

                    for (var i = 0; i < mapedValue_temp.QueryNumberOfDetectedFaces(); i++)
                    {
                        PXCMFaceData.Face          face      = mapedValue_temp.QueryFaceByIndex(i);
                        PXCMFaceData.DetectionData detection = face.QueryDetection();
                        if (detection != null)
                        {
                            m_form.insertFeaturesToMongoDB(face, framecount, DateTime.Now.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss.FFFK"));
                            break;//ONLY Collect the first detected face
                        }
                    }

                    data.TryRemove(framecount, out mapedValue_temp);
                    framecount++;
                }
                else
                {
                    // Queue drained: stop persisting, reset state, and end this worker thread
                    m_form.insertFeaturesToMongoDBStop();
                    data.Clear();
                    receivedFrame = 1;
                    recordData.Abort();
                }
            }
        }
Example No. 3
        public void ChangeFace(int faceIndex, PXCMFaceData.Face face, int imageHeight, int imageWidth)
        {
            const int threshold           = 5;
            const int expressionThreshold = 55;
            const int faceTextWidth       = 100;

            m_imageWidth = imageWidth;

            PXCMFaceData.DetectionData fdetectionData = face.QueryDetection();
            m_color = m_colorList[faceIndex % m_colorList.Length];

            if (fdetectionData == null)
            {
                // No detection data for this face; the fuller variant in
                // Example No. 9 positions off-face labels from this offset
                int currentWidth = faceIndex * faceTextWidth;
            }
            else
            {
                fdetectionData.QueryBoundingRect(out m_rectangle);

                m_faceId.X = m_rectangle.x + threshold;
                m_faceId.Y = m_rectangle.y + threshold;

                m_expression.X = (m_rectangle.x + m_rectangle.w + expressionThreshold >= m_imageWidth)
                    ? (m_rectangle.x - expressionThreshold)
                    : (m_rectangle.x + m_rectangle.w + threshold);
                m_expression.Y = m_rectangle.y + threshold;
            }
        }
Example No. 4
        public void DrawLocation(PXCMFaceData.Face face)
        {
            Debug.Assert(face != null);
            if (m_bitmap == null || !Detection.Checked)
            {
                return;
            }

            PXCMFaceData.DetectionData detection = face.QueryDetection();
            if (detection == null)
            {
                return;
            }

            lock (m_bitmapLock)
            {
                using (Graphics graphics = Graphics.FromImage(m_bitmap))
                using (var pen = new Pen(m_faceTextOrganizer.Colour, 3.0f))
                using (var brush = new SolidBrush(m_faceTextOrganizer.Colour))
                using (var font = new Font(FontFamily.GenericMonospace, m_faceTextOrganizer.FontSize, FontStyle.Bold))
                {
                    graphics.DrawRectangle(pen, m_faceTextOrganizer.RectangleLocation);
                    String faceId = String.Format("Face ID: {0}",
                                                  face.QueryUserID().ToString(CultureInfo.InvariantCulture));
                    graphics.DrawString(faceId, font, brush, m_faceTextOrganizer.FaceIdLocation);
                }
            }
        }
Example No. 5
        // Update the face frame
        private void updateFaceFrame()
        {
            // Get the frame data
            PXCMCapture.Sample sample = senceManager.QuerySample();
            UpdateColorImage(sample.color);

            // Update the face data from the SenseManager module
            faceData.Update();

            // Get the number of detected faces
            int numFaces = faceData.QueryNumberOfDetectedFaces();

            // Added: prepare a variable to store the face pose information
            PXCMFaceData.PoseEulerAngles[] poseAngle = new PXCMFaceData.PoseEulerAngles[POSE_MAXFACES];



            if (senceManager != null)
            {
                // Retrieve and draw information for each detected face
                for (int i = 0; i < numFaces; ++i)
                {
                    // Get the face information
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);

                    // Get the face position (obtained via Depth)
                    var detection = face.QueryDetection();
                    if (detection != null)
                    {
                        PXCMRectI32 faceRect;
                        detection.QueryBoundingRect(out faceRect);

                        // Move the rectangle to match the face position
                        TranslateTransform transform = new TranslateTransform(faceRect.x, faceRect.y);
                        rect[i].Width           = faceRect.w;
                        rect[i].Height          = faceRect.h;
                        rect[i].Stroke          = Brushes.Blue;
                        rect[i].StrokeThickness = 3;
                        rect[i].RenderTransform = transform;

                        // Added: pose (face orientation); only available when Depth is in use
                        PXCMFaceData.PoseData pose = face.QueryPose();
                        if (pose != null)
                        {
                            // Display the pose information next to the face
                            tb[i, 0].RenderTransform = new TranslateTransform(transform.X, transform.Y - 30);
                            tb[i, 1].RenderTransform = new TranslateTransform(transform.X, transform.Y - 60);
                            tb[i, 2].RenderTransform = new TranslateTransform(transform.X, transform.Y - 90);

                            // Added: face pose angles (yaw, pitch, roll)
                            pose.QueryPoseAngles(out poseAngle[i]);
                            tb[i, 0].Text = "pitch:" + poseAngle[i].pitch;
                            tb[i, 1].Text = "roll:" + poseAngle[i].roll;
                            tb[i, 2].Text = "yaw:" + poseAngle[i].yaw;
                        }
                    }
                }
            }
        }
Example No. 6
        public override void Work(Graphics g)
        {
            data = module.CreateOutput();
            data.Update();
            // Get the number of tracked faces
            Int32 nfaces = data.QueryNumberOfDetectedFaces();

            Console.WriteLine("Number of faces : " + nfaces);
            for (Int32 i = 0; i < nfaces; i++)
            {
                // all faces in the picture

                PXCMFaceData.Face face = data.QueryFaceByIndex(i);

                //face location
                PXCMFaceData.DetectionData ddata = face.QueryDetection();

                // Retrieve the face landmark data instance
                PXCMFaceData.LandmarksData ldata = face.QueryLandmarks();

                // Skip faces that are missing detection or landmark data
                if (ddata == null || ldata == null)
                {
                    continue;
                }



                // work on DetectionData

                PXCMRectI32 rect;
                ddata.QueryBoundingRect(out rect);

                //draw rect
                Rectangle rectangle = new Rectangle(rect.x, rect.y, rect.w, rect.h); // Convert to Rectangle
                g.DrawRectangle(pen, rectangle);                                     // Draw



                // get the landmark data and draw each point
                PXCMFaceData.LandmarkPoint[] points;
                ldata.QueryPoints(out points);

                if (points != null)
                {
                    for (Int32 j = 0; j < points.Length; j++)
                    {
                        g.DrawEllipse(pen, points[j].image.x, points[j].image.y, 2, 2);
                    }
                }
            }
            data.Dispose();
        }
Example No. 7
        private void ProcessingThread()
        {
            // Start AcquireFrame/ReleaseFrame loop
            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // Acquire the color image data
                PXCMCapture.Sample  sample = senseManager.QuerySample();
                Bitmap              colorBitmap;
                PXCMImage.ImageData colorData;
                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
                colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                // Get face data
                if (faceData != null)
                {
                    faceData.Update();
                    numFacesDetected = faceData.QueryNumberOfDetectedFaces();

                    if (numFacesDetected > 0)
                    {
                        // Get the first face detected (index 0)
                        PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                        // Retrieve face location data; detection data may be absent
                        PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                        if (faceDetectionData != null)
                        {
                            faceDetectionData.QueryFaceAverageDepth(out currentFaceDepth);
                        }

                        // Recognition is not performed in this sample
                        userId = "Unrecognized";
                    }
                    else
                    {
                        userId = "No users in view";
                    }
                }

                // Display the color stream and other UI elements
                UpdateUI(colorBitmap);

                // Release resources
                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                sample.color.Dispose();

                // Release the frame
                senseManager.ReleaseFrame();
            }
        }
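ProcessingThread polls senseManager and faceData, which must be created and configured before the thread starts. A minimal setup sketch, assuming the same field names and mirroring the configuration calls that appear in Example No. 16:

        // Sketch only: create the SenseManager, enable the color stream and
        // the face module, then create the PXCMFaceData output polled above.
        senseManager = PXCMSenseManager.CreateInstance();
        senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 0);
        senseManager.EnableFace();

        PXCMFaceModule faceModule = senseManager.QueryFace();
        PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();
        faceConfig.detection.isEnabled = true; // bounding rect and average depth
        faceConfig.ApplyChanges();

        senseManager.Init();
        faceData = faceModule.CreateOutput();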
Example No. 8
        // Update the face frame
        private void updateFaceFrame()
        {
            // Get the frame data
            PXCMCapture.Sample sample = senceManager.QuerySample();
            UpdateColorImage(sample.color);

            // Update the face data from the SenseManager module
            faceData.Update();

            // Get the number of detected faces
            int numFaces = faceData.QueryNumberOfDetectedFaces();

            if (senceManager != null)
            {
                // Retrieve and draw information for each detected face
                for (int i = 0; i < numFaces; ++i)
                {
                    // Get the face information
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);

                    // Get the face position (obtained via Depth)
                    var detection = face.QueryDetection();
                    if (detection != null)
                    {
                        PXCMRectI32 faceRect;
                        detection.QueryBoundingRect(out faceRect);

                        // Move the rectangle to match the face position
                        TranslateTransform transform = new TranslateTransform(faceRect.x, faceRect.y);
                        rect[i].Width           = faceRect.w;
                        rect[i].Height          = faceRect.h;
                        rect[i].Stroke          = Brushes.Blue;
                        rect[i].StrokeThickness = 3;
                        rect[i].RenderTransform = transform;

                        // Added: get the pulse-related face data
                        PXCMFaceData.PulseData pulse = face.QueryPulse();
                        if (pulse != null)
                        {
                            // Display the heart rate next to the face
                            tb[i].RenderTransform = new TranslateTransform(transform.X, transform.Y - 30);

                            // Added: show the heart rate
                            float hrate = pulse.QueryHeartRate();
                            tb[i].Text = "HeartRate:" + hrate;
                        }
                    }
                }
            }
        }
Example No. 9
        public void ChangeFace(int faceIndex, PXCMFaceData.Face face, int imageHeight, int imageWidth)
        {
            const int threshold           = 5;
            const int expressionThreshold = 55;
            const int faceTextWidth       = 100;
            const int textHeightThreshold = 30;

            m_imageWidth = imageWidth;

            PXCMFaceData.DetectionData fdetectionData = face.QueryDetection();
            m_color = m_colorList[faceIndex % m_colorList.Length];

            if (fdetectionData == null)
            {
                int currentWidth = faceIndex * faceTextWidth;

                m_recognitionId.X = threshold + currentWidth;
                m_recognitionId.Y = threshold;

                m_pose.X = threshold + currentWidth;
                m_pose.Y = m_recognitionId.Y + 3 * threshold;

                m_pulse.X = threshold + currentWidth;
                m_pulse.Y = m_pose.Y + 6 * threshold;

                m_expression.X = threshold + currentWidth;
                m_expression.Y = m_pulse.Y + threshold + textHeightThreshold;
            }
            else
            {
                fdetectionData.QueryBoundingRect(out m_rectangle);

                m_recognitionId.X = m_rectangle.x + threshold;
                m_recognitionId.Y = m_rectangle.y + CalculateDefiniteFontSize() + threshold;

                m_faceId.X = m_rectangle.x + threshold;
                m_faceId.Y = m_rectangle.y + threshold;

                m_pose.X = m_rectangle.x + threshold;
                m_pose.Y = m_rectangle.y + m_rectangle.h - 3 * CalculateDefiniteFontSize() - 2 * threshold;

                m_pulse.X = m_rectangle.x + m_rectangle.w - 10 * CalculateDefiniteFontSize();
                m_pulse.Y = m_faceId.Y;

                m_expression.X = (m_rectangle.x + m_rectangle.w + expressionThreshold >= m_imageWidth)
                    ? (m_rectangle.x - expressionThreshold)
                    : (m_rectangle.x + m_rectangle.w + threshold);
                m_expression.Y = m_rectangle.y + threshold;
            }
        }
Example No. 10
        private void CreateFaceObject(PXCMFaceData.Face face, ref FaceObject fObj)
        {
            PXCMFaceData.DetectionData detection = face.QueryDetection();
            if (detection != null)
            {
                PXCMRectI32 faceRect = new PXCMRectI32();
                detection.QueryBoundingRect(out faceRect);

                PXCMFaceData.RecognitionData recogData = face.QueryRecognition();
                int userID = -1;
                if (recogData != null)
                {
                    userID = recogData.QueryUserID();
                }

                //Registered Face
                if (userID > 0)
                {
                    //Get Face by USER-ID
                    fObj = this.GetRecognizedFaceByID(userID);

                    //Due to pre-loaded DB, FaceObject can be null
                    if (fObj == null)
                    {
                        fObj = new FaceObject();
                    }
                }

                float faceDistance;
                detection.QueryFaceAverageDepth(out faceDistance);
                faceDistance /= 1000; // average depth is reported in millimeters; convert to meters

                Rectangle rect = new Rectangle(faceRect.x, faceRect.y, faceRect.w, faceRect.h);
                //Point faceLoc = faceCamConfig.GetFaceLoc(rect);
                Point faceLoc = faceCamConfig.Location;

                fObj.ID           = userID;
                fObj.boundingBox  = rect;
                fObj.cellLocation = faceLoc;
                fObj.registerTime = DateTime.Now;
                fObj.lastSeen     = DateTime.Now;
                fObj.distance     = faceDistance;
            }
        }
Example No. 11
        public void updateData(PXCMFaceData.Face face)
        {
            if (face == null)
            {
                return;
            }
            PXCMFaceData.LandmarksData ldata = face.QueryLandmarks();
            if (ldata == null)
            {
                return;
            }

            PXCMRectI32 rect;

            // Guard against missing detection data before dereferencing it
            PXCMFaceData.DetectionData detection = face.QueryDetection();
            if (detection == null)
            {
                return;
            }
            detection.QueryBoundingRect(out rect);

            // get the landmark data
            var landmarkGroupTypes = Enum.GetValues(typeof(PXCMFaceData.LandmarksGroupType)).Cast <PXCMFaceData.LandmarksGroupType>();

            int count = 0;

            // Convert each LandmarkPoint's world coordinates into the member variable
            foreach (var landmarkGroupType in landmarkGroupTypes)
            {
                PXCMFaceData.LandmarkPoint[] points;
                ldata.QueryPointsByGroup(landmarkGroupType, out points);
                if (points == null)
                {
                    continue; // no points reported for this group
                }

                PXCMPoint3DF32[] Point3DArray = new PXCMPoint3DF32[points.Length];
                for (int i = 0; i < points.Length; i++)
                {
                    Point3DArray[i] = points[i].world;
                    Console.WriteLine(String.Join(" ", rect.x, rect.y, rect.w, rect.h));
                    Console.WriteLine(points[i].image.x + "\t" + points[i].image.y + "\t" + points[i].world.z);
                }
                count += points.Length;
                // Store the world coordinates
                landmarksData.Add(landmarkGroupType, Point3DArray);
            }

            Console.WriteLine(count);
        }
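Unlike the flat 78-entry buffer in Example No. 1, the landmarksData member here is keyed by landmark group. A sketch of the assumed declaration (the class body is not shown, so this is an assumption; it requires System.Collections.Generic):

        // Assumed field: one array of world-space points per landmark group.
        private Dictionary<PXCMFaceData.LandmarksGroupType, PXCMPoint3DF32[]> landmarksData =
            new Dictionary<PXCMFaceData.LandmarksGroupType, PXCMPoint3DF32[]>();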
Example No. 12
        private void SaveFeature(PXCMFaceData.Face qface, int frameCount)
        {
            PXCMFaceData.LandmarksData   ldata = qface.QueryLandmarks();
            PXCMFaceData.ExpressionsData edata = qface.QueryExpressions();
            PXCMRectI32 rect;

            // Guard against faces without landmark or detection data
            PXCMFaceData.DetectionData detection = qface.QueryDetection();
            if (ldata == null || detection == null)
            {
                return;
            }
            detection.QueryBoundingRect(out rect);
            // Save the timestamp
            string time = DateTime.Now.ToString("yyyy-MM-dd") + " " + DateTime.Now.ToString("HH:mm:ss:fff");

            sw_feature.Write(frameCount.ToString() + ' ' + time + ' ');
            // Write each LandmarkPoint's position features

            PXCMFaceData.LandmarkPoint[] points;
            ldata.QueryPoints(out points);
            // Clamp to the available points; the SDK reports up to 78 landmarks
            for (int i = 0; points != null && i < points.Length && i < 78; i++)
            {
                // Put the facial features into the output
                double position_x = Math.Round(points[i].image.x - rect.x, 4);
                double position_y = Math.Round(points[i].image.y - rect.y, 4);
                double position_z = Math.Round(points[i].world.z, 5);
                sw_feature.Write(position_x.ToString() + ' ' + position_y.ToString() + ' ' + position_z.ToString() + ' ');
            }
            for (int i = 0; i < 22; i++)
            {
                double emotion = 0;
                if (edata != null)
                {
                    PXCMFaceData.ExpressionsData.FaceExpressionResult score;
                    if (edata.QueryExpression((PXCMFaceData.ExpressionsData.FaceExpression)i, out score) && score != null)
                    {
                        Expression.facialExpressionIndensity[i] = score.intensity;
                        emotion = score.intensity;
                    }
                }
                sw_feature.Write(emotion.ToString() + ' ');
            }
            sw_feature.WriteLine();
        }
Example No. 13
        private void TrackFace(PXCMFaceData.Face face)
        {
            PXCMRectI32 rect;

            PXCMFaceData.DetectionData detection = face.QueryDetection();
            if (detection == null)
            {
                return;
            }
            detection.QueryBoundingRect(out rect);
            var point = new Point3D(rect.x + rect.w / 2, rect.y + rect.h / 2);

            Face.Position = CreatePosition(point, new Point3D());

            PXCMFaceData.LandmarksData landmarksData = face.QueryLandmarks();
            if (landmarksData == null)
            {
                return;
            }
            PXCMFaceData.LandmarkPoint[] facePoints;
            landmarksData.QueryPoints(out facePoints);
            if (facePoints == null)
            {
                return;
            }
            foreach (var item in facePoints)
            {
                switch (item.source.alias)
                {
                case PXCMFaceData.LandmarkType.LANDMARK_UPPER_LIP_CENTER:
                    Face.Mouth.Position = CreatePosition(ToPoint3D(item.image), ToPoint3D(item.world));
                    break;

                case PXCMFaceData.LandmarkType.LANDMARK_EYE_LEFT_CENTER:
                    Face.LeftEye.Position = CreatePosition(ToPoint3D(item.image), ToPoint3D(item.world));
                    break;

                case PXCMFaceData.LandmarkType.LANDMARK_EYE_RIGHT_CENTER:
                    Face.RightEye.Position = CreatePosition(ToPoint3D(item.image), ToPoint3D(item.world));
                    break;
                }
            }
        }
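CreatePosition and ToPoint3D are project helpers that are not shown. A plausible sketch of the ToPoint3D overloads, assuming a custom Point3D type with two- and three-argument constructors (the two-argument call at the top of TrackFace implies one exists):

        // Hypothetical helpers assumed by TrackFace: PXCMPointF32 carries the
        // 2D image coordinates, PXCMPoint3DF32 the 3D world coordinates.
        private static Point3D ToPoint3D(PXCMPointF32 p)
        {
            return new Point3D(p.x, p.y);
        }

        private static Point3D ToPoint3D(PXCMPoint3DF32 p)
        {
            return new Point3D(p.x, p.y, p.z);
        }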
Example No. 14
        private void Update()
        {
            // Start AcquireFrame-ReleaseFrame loop
            while (sm.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // Acquire color image data
                PXCMCapture.Sample  sample = sm.QuerySample();
                Bitmap              colorBitmap;
                PXCMImage.ImageData colorData;
                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out colorData);
                colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                // Create an instance of MyTrackedPerson
                MyTrackedPerson myTrackedPerson = new MyTrackedPerson();

                // Acquire person tracking data
                personData = personModule.QueryOutput();
                myTrackedPerson.PersonsDetected = personData.QueryNumberOfPeople();

                if (myTrackedPerson.PersonsDetected == 1)
                {
                    // person track data
                    PXCMPersonTrackingData.Person         trackedPerson     = personData.QueryPersonData(PXCMPersonTrackingData.AccessOrderType.ACCESS_ORDER_BY_ID, 0);
                    PXCMPersonTrackingData.PersonTracking trackedPersonData = trackedPerson.QueryTracking();
                    PXCMPersonTrackingData.BoundingBox2D  personBox         = trackedPersonData.Query2DBoundingBox();
                    myTrackedPerson.X = personBox.rect.x;
                    myTrackedPerson.Y = personBox.rect.y;
                    myTrackedPerson.H = personBox.rect.h;
                    myTrackedPerson.W = personBox.rect.w;

                    // Acquire face tracking data
                    faceData.Update();
                    myTrackedPerson.FacesDetected = faceData.QueryNumberOfDetectedFaces();

                    if (myTrackedPerson.FacesDetected == 1)
                    {
                        PXCMFaceData.Face          face = faceData.QueryFaceByIndex(0);
                        PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                        if (faceDetectionData != null)
                        {
                            PXCMRectI32 faceRectangle;
                            faceDetectionData.QueryBoundingRect(out faceRectangle);
                            myTrackedPerson.FaceH = faceRectangle.h;
                            myTrackedPerson.FaceW = faceRectangle.w;
                            myTrackedPerson.FaceX = faceRectangle.x;
                            myTrackedPerson.FaceY = faceRectangle.y;
                            float faceDepth;
                            faceDetectionData.QueryFaceAverageDepth(out faceDepth);
                            myTrackedPerson.FaceDepth = faceDepth;
                        }

                        //save image
                        if (doit == true)
                        {
                            colorBitmap.Save("myBitmap" + imgID + ".bmp");
                            doit = false;
                            stopwatch.Start();
                        }
                        else if (stopwatch.Elapsed.Seconds > 10)
                        {
                            imgID++;
                            doit = true;
                            stopwatch.Reset();
                        }
                    }

                    //IRobotCreate.SetDrive(40, 40);
                    //my.servoNeck.setTargetPosition(1500);
                    my.servos.move(Behaviour.neck, 100);//change because new servo class

                    if ((0 < (myTrackedPerson.X)) && ((myTrackedPerson.X) <= 120))
                    {
                        //my.servoShoulder.setTargetPosition(1300);
                        my.servos.move(Behaviour.neck, 90);
                        IRobotCreate.SetDrive(20, -20);
                    }
                    else if ((120 < (myTrackedPerson.X)) && ((myTrackedPerson.X) < 310))
                    {
                        // follow up based on face depth
                        //my.servoShoulder.setTargetPosition(1500);
                        my.servos.move(Behaviour.head, 100);//change because new servo class
                        // Round the face depth down to the nearest 100 mm (modulo binds before the subtraction)
                        float depth = myTrackedPerson.FaceDepth - (int)myTrackedPerson.FaceDepth % 100;
                        if (myTrackedPerson.FacesDetected == 1 && depth < 1750 && depth > 1400)
                        {
                            IRobotCreate.SetDrive(0, 0);
                        }
                        else if (myTrackedPerson.FacesDetected == 1 && depth < 1400)
                        {
                            IRobotCreate.SetDrive(-100, -100);
                        }
                        else if (myTrackedPerson.FacesDetected == 1 && depth > 1750)
                        {
                            IRobotCreate.SetDrive(100, 100);
                        }
                    }
                    else if (310 <= (myTrackedPerson.X))
                    {
                        //my.servoShoulder.setTargetPosition(1700);
                        my.servos.move(Behaviour.head, 120);//change because new servo class
                        IRobotCreate.SetDrive(-20, 20);
                    }
                }
                //my.servoNeck.SetSpeed(40);
                // my.servoShoulder.SetSpeed(40);
                my.servos.setSpeed(Behaviour.neck, 100); //change because new servo class
                my.servos.setSpeed(Behaviour.head, 100); //change because new servo class

                // Update UI
                Render(colorBitmap, myTrackedPerson);

                // Release resources
                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                sm.ReleaseFrame();
            }
        }
Example No. 15
        // Update the face frame
        private void updateFaceFrame()
        {
            // Get the frame data
            PXCMCapture.Sample sample = senceManager.QuerySample();
            UpdateColorImage(sample.color);

            // Update the face data from the SenseManager module
            faceData.Update();

            // Get the number of detected faces
            int numFaces = faceData.QueryNumberOfDetectedFaces();

            // Prepare containers for the face expression data and angle data
            PXCMFaceData.ExpressionsData[] expressionData = new PXCMFaceData.ExpressionsData[EXPRESSION_MAXFACES];
            PXCMFaceData.ExpressionsData.FaceExpressionResult expressionResult;

            if (senceManager != null)
            {
                // Retrieve and draw information for each detected face
                for (int i = 0; i < numFaces; ++i)
                {
                    // Get the face information
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);

                    // Get the face position (obtained via Depth)
                    var detection = face.QueryDetection();
                    if (detection != null)
                    {
                        PXCMRectI32 faceRect;
                        detection.QueryBoundingRect(out faceRect);

                        // Move the rectangle to match the face position
                        TranslateTransform transform = new TranslateTransform(faceRect.x, faceRect.y);
                        rect[i].Width           = faceRect.w;
                        rect[i].Height          = faceRect.h;
                        rect[i].Stroke          = Brushes.Blue;
                        rect[i].StrokeThickness = 3;
                        rect[i].RenderTransform = transform;

                        // Added: get the expression data from the face data
                        expressionData[i] = face.QueryExpressions();

                        if (expressionData[i] != null)
                        {
                            // Position the expression readouts next to the face
                            tb[i, 0].RenderTransform = new TranslateTransform(transform.X, transform.Y - 30);
                            tb[i, 1].RenderTransform = new TranslateTransform(transform.X, transform.Y - 60);
                            tb[i, 2].RenderTransform = new TranslateTransform(transform.X, transform.Y - 90);

                            // Added: get and display how far the mouth is open
                            if (expressionData[i].QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out expressionResult))
                            {
                                // Drawing
                                tb[i, 0].Text = "MOUTH_OPEN:" + expressionResult.intensity;
                            }

                            // Added: get and display how far the tongue is out
                            if (expressionData[i].QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT, out expressionResult))
                            {
                                // Drawing
                                tb[i, 1].Text = "TONGUE_OUT:" + expressionResult.intensity;
                            }

                            // Added: get and display the smile intensity
                            if (expressionData[i].QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out expressionResult))
                            {
                                // Drawing
                                tb[i, 2].Text = "SMILE:" + expressionResult.intensity;
                            }
                        }
                    }
                }
            }
        }
Example No. 16
        private void ProcessingThread()
        {
            string videoName, nameColor, nameDepth, nameIr;
            int    width  = 640;
            int    height = 480;

            int       frameIndex      = 0;
            string    formatImageFile = ".png";
            int       nframes         = 0;
            int       lostFrames      = 0;
            string    landmarks       = null;
            long      frameTimeStamp  = 0;
            PXCMImage color;
            PXCMImage depth;
            PXCMImage ir;

            PXCMCapture.Sample    sample;
            PXCMImage.ImageData   imageColor;
            PXCMImage.ImageData   imageDepth;
            PXCMImage.ImageData   imageIr;
            WriteableBitmap       wbm1, wbm2, wbm3;
            Int32Rect             rect2crop;
            PXCMFaceModule        faceModule;
            PXCMFaceConfiguration faceConfig;
            PXCMFaceData          faceData = null;
            //Offset applied to the cropped face rectangle
            Offset offset = new Offset(0, 0, 0, 0);

            //For each directory, extract all landmark and image streams from every video
            foreach (var dir in dirsSource)
            {
                //If the folder is not empty
                if (Directory.EnumerateFileSystemEntries(dir).Any())
                {
                    dictPaths.TryGetValue(dir, out paths); //This dict contains all source and output dirs
                    List <string> fileList = new List <string>(Directory.GetFiles(dir, "*.rssdk"));
                    //For each video
                    foreach (var inputFile in fileList)
                    {
                        lostFrames = 0;
                        videoName  = inputFile.Split('\\').Last().Split('.')[0];
                        // Create a SenseManager instance
                        sm = PXCMSenseManager.CreateInstance();
                        // The second argument of SetFileName selects the mode:
                        // true = recording, false = playback.
                        // Playback settings below read the .rssdk file and extract its frames.
                        sm.captureManager.SetFileName(inputFile, false);
                        sm.captureManager.SetRealtime(false);
                        nframes = sm.captureManager.QueryNumberOfFrames();

                        //Update the UI with the current extraction in real time
                        Application.Current.Dispatcher.BeginInvoke(new Action(() =>
                        {
                            textBox2.Text = nframes.ToString();
                            textBox3.Text = String.Format("Record: {0}\nVideo: {1}", paths.root, videoName);
                        }));

                        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, 0);
                        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, width, height);
                        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, width, height);

                        //Extract Landmarks
                        sm.EnableFace();
                        faceModule = sm.QueryFace();
                        faceConfig = faceModule.CreateActiveConfiguration();
                        faceConfig.landmarks.maxTrackedFaces = 1;
                        faceConfig.landmarks.isEnabled       = true;
                        faceConfig.detection.maxTrackedFaces = 1;
                        faceConfig.detection.isEnabled       = true;
                        faceConfig.EnableAllAlerts();
                        faceConfig.ApplyChanges();

                        sm.Init();

                        // This string stores all data before saving to csv file
                        landmarks = null;
                        // Start AcquireFrame/ReleaseFrame loop
                        var stopwatch = new Stopwatch();
                        stopwatch.Start();

                        while (sm.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            // Retrieve face data
                            faceModule = sm.QueryFace();
                            frameIndex = sm.captureManager.QueryFrameIndex();
                            if (faceModule != null)
                            {
                                // Retrieve the most recent processed data
                                faceData = faceModule.CreateOutput();
                                faceData.Update();
                            }
                            if (faceData != null)
                            {
                                Int32 nfaces = faceData.QueryNumberOfDetectedFaces();

                                frameTimeStamp = sm.captureManager.QueryFrameTimeStamp();
                                //PXCMCapture.Sample sample = senseManager.QuerySample();
                                if (nfaces == 0) //If no face was detected, count this frame as a "lost frame"
                                {
                                    lostFrames += 1;
                                }
                                for (Int32 i = 0; i < nfaces; i++)
                                {
                                    //Retrieve the image
                                    sample = sm.QuerySample();
                                    // Work on the images
                                    color = sample.color;
                                    depth = sample.depth;
                                    ir    = sample.ir;

                                    PXCMFaceData.Face          face         = faceData.QueryFaceByIndex(i);
                                    PXCMFaceData.LandmarksData landmarkData = face.QueryLandmarks();
                                    PXCMFaceData.DetectionData ddata        = face.QueryDetection();
                                    PXCMFaceData.PoseData poseData = face.QueryPose();
                                    // Pose data can be null (e.g., no usable depth); keep default angles in that case
                                    var poseEulerAngles = new PXCMFaceData.PoseEulerAngles();
                                    if (poseData != null)
                                    {
                                        poseData.QueryHeadPosition(out PXCMFaceData.HeadPosition headPosition);
                                        poseData.QueryPoseAngles(out poseEulerAngles);
                                        Debug.WriteLine(headPosition.headCenter.x + " " + headPosition.headCenter.y + " " + headPosition.headCenter.z + " " + poseEulerAngles.pitch + " " + poseEulerAngles.roll + " " + poseEulerAngles.yaw);
                                    }

                                    //Rectangle coordinates of the detected face
                                    ddata.QueryBoundingRect(out PXCMRectI32 rect);

                                    //See the offset struct to define the values
                                    rect2crop = new Int32Rect(rect.x + offset.x, rect.y + offset.y, rect.w + offset.w, rect.h + offset.h);
                                    ddata.QueryFaceAverageDepth(out Single depthDistance);

                                    color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out imageColor);
                                    depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_RAW, out imageDepth);
                                    ir.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out imageIr);

                                    //Convert it to Bitmap
                                    wbm1 = imageColor.ToWritableBitmap(0, color.info.width, color.info.height, 100.0, 100.0);
                                    wbm2 = imageDepth.ToWritableBitmap(0, depth.info.width, depth.info.height, 100.0, 100.0);
                                    wbm3 = imageIr.ToWritableBitmap(0, ir.info.width, ir.info.height, 100.0, 100.0);

                                    color.ReleaseAccess(imageColor);
                                    depth.ReleaseAccess(imageDepth);
                                    ir.ReleaseAccess(imageIr);

                                    nameColor = paths.rgbFolder + "\\" + videoName + "\\" + videoName + "_color_" + frameIndex + formatImageFile;
                                    nameDepth = paths.depthFolder + "\\" + videoName + "\\" + videoName + "_depth_" + frameIndex + formatImageFile;
                                    nameIr    = paths.irFolder + "\\" + videoName + "\\" + videoName + "_ir_" + frameIndex + formatImageFile;

                                    //Crops the face images!
                                    CreateThumbnail(nameColor, new CroppedBitmap(wbm1, rect2crop));
                                    CreateThumbnail(nameDepth, new CroppedBitmap(wbm2, rect2crop));
                                    CreateThumbnail(nameIr, new CroppedBitmap(wbm3, rect2crop));

                                    //Debug.WriteLine((depthDistance /1000 ) + " m" + " " + rect.x + " " + rect.y + " " + rect.w + " " + rect.h);

                                    /*
                                     * x - The horizontal coordinate of the top left pixel of the rectangle.
                                     * y - The vertical coordinate of the top left pixel of the rectangle.
                                     * w - The rectangle width in pixels.
                                     * h - The rectangle height in pixels.
                                     */

                                    if (landmarkData != null)
                                    {
                                        PXCMFaceData.LandmarkPoint[] landmarkPoints;
                                        landmarkData.QueryPoints(out landmarkPoints);

                                        Application.Current.Dispatcher.BeginInvoke(new Action(() => textBox1.Text = frameIndex.ToString()));

                                        landmarks += inputFile.Split('\\').Last() + ";" + frameIndex + ";" + nameColor + ";" + nameDepth + ";" + nameIr + ";" + frameTimeStamp + ";" + depthDistance.ToString("F") + ";" + poseEulerAngles.yaw.ToString("F") + ";" + poseEulerAngles.pitch.ToString("F") + ";" + poseEulerAngles.roll.ToString("F") + ";"; // Begin line with frame info

                                        // Write the landmark coordinates along the line
                                        for (int j = 0; j < landmarkPoints.Length; j++)
                                        {
                                            //get world coordinates
                                            landmarks += /*landmarkPoints[j].source.index + ";" +*/ (landmarkPoints[j].world.x * 1000).ToString("F") + ";" + (landmarkPoints[j].world.y * 1000).ToString("F") + ";" + (landmarkPoints[j].world.z * 1000).ToString("F") + ";";
                                        }
                                        for (int j = 0; j < landmarkPoints.Length; j++)
                                        {
                                            //get coordinate of the image pixel
                                            landmarks += /*landmarkPoints[j].confidenceImage + ";" + */ landmarkPoints[j].image.x.ToString("F") + ";" + landmarkPoints[j].image.y.ToString("F") + ";";
                                        }
                                        landmarks += '\n'; // Breaks line after the end of the frame coordinates
                                    }
                                }
                            }
                            // Release the frame
                            if (faceData != null)
                            {
                                faceData.Dispose();
                            }
                            sm.ReleaseFrame();

                            WriteToFile(paths.csvFile, landmarks);
                            landmarks = null;
                        }
                        sm.Dispose();
                        stopwatch.Stop();
                        //Update the UI with the current extraction in real time
                        Application.Current.Dispatcher.BeginInvoke(new Action(() =>
                        {
                            elapsedLabel.Content = String.Format("Elapsed Time: {0} (s)", stopwatch.Elapsed.TotalSeconds.ToString("F"));
                        }));
                    }
                }
            }
        }
Example No. 17
        private void Update()
        {
            Int32 facesDetected = 0;

            /* Int32 faceH = 0;
             * Int32 faceW = 0;
             * Int32 faceX = 0;
             * Int32 faceY = 0;
             */
            float faceDepth = 0;

            // Start AcquireFrame-ReleaseFrame loop
            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // Acquire color image data
                PXCMCapture.Sample  sample = senseManager.QuerySample();
                Bitmap              colorBitmap;
                PXCMImage.ImageData colorData;
                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out colorData);
                colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                // Acquire face data
                if (faceData != null)
                {
                    faceData.Update();
                    facesDetected = faceData.QueryNumberOfDetectedFaces();

                    if (facesDetected > 0)
                    {
                        // Get the first face detected (index 0)
                        PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                        // Retrieve face location data
                        PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                        if (faceDetectionData != null)
                        {
                            PXCMRectI32 faceRectangle;
                            faceDetectionData.QueryBoundingRect(out faceRectangle);
                            faceH = faceRectangle.h;
                            faceW = faceRectangle.w;
                            faceX = faceRectangle.x;
                            faceY = faceRectangle.y;

                            // Get average depth value of detected face
                            faceDetectionData.QueryFaceAverageDepth(out faceDepth);

                            //Console.WriteLine(">>>>>>>Face detected");
                        }
                    }
                }

                // Update UI
                Render(colorBitmap, facesDetected, faceH, faceW, faceX, faceY, faceDepth);



                // Release the color frame
                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                senseManager.ReleaseFrame();
            }
        }
Example No. 18
        /// <summary>
        /// Capture the face image and export it
        /// </summary>
        /// <param name="moduleOutput"></param>
        public void DrawInformation(PXCMFaceData moduleOutput)
        {
            for (var i = 0; i < moduleOutput.QueryNumberOfDetectedFaces(); i++)
            {
                PXCMFaceData.Face face = moduleOutput.QueryFaceByIndex(i);
                if (face == null)
                {
                    continue;
                }

                #region Retrieve face tracking data
                //Get the face detection data
                PXCMFaceData.DetectionData detection = face.QueryDetection();
                if (detection == null)
                {
                    continue;
                }

                //Get the face bounding box
                PXCMRectI32 range;
                detection.QueryBoundingRect(out range);
                #endregion

                #region Standalone photo only for FaceId 0
                if (i == 0)
                {
                    lock (PicLock) {
                        FaceImage = new Bitmap(128, 128);
                        using (Graphics g = Graphics.FromImage(FaceImage)) {
                            g.DrawImage(Image,
                                        new Rectangle(0, 0, 128, 128),
                                        new Rectangle(range.x, range.y, range.w, range.h)
                                        , GraphicsUnit.Pixel);
                        }
                    }
                }

                //Draw the user's bounding box
                lock (PicLock)
                    using (var g = Graphics.FromImage(Image)) {
                        Pen pan = i == 0 ?
                                  (Pen)Pens.Red.Clone() :
                                  (Pen)Pens.Yellow.Clone();
                        Brush brush = i == 0 ?
                                      Brushes.Red :
                                      Brushes.Yellow;
                        pan.Width = 4;
                        g.DrawRectangle(
                            pan,
                            new Rectangle(
                                range.x, range.y,
                                range.w, range.h
                                ));

                        var recognition = face.QueryRecognition();
                        var userId      = recognition != null ? recognition.QueryUserID() : -1;
                        var text        = "未註冊使用者"; // "unregistered user"
                        if (userId != -1 && UserTable.ContainsKey(userId))
                        {
                            text = UserTable[userId];
                        }

                        var size = g.MeasureString( // get the label size
                            "#" + i + " " + text,
                            new Font("Arial", 14));
                        g.FillRectangle(
                            brush,
                            new Rectangle(
                                range.x, range.y,
                                (int)size.Width, 20
                                ));
                        g.DrawString(
                            "#" + i + " " + text,
                            new Font("Arial", 14),
                            Brushes.Black, range.x, range.y);
                    }
                #endregion
            }
        }
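The name lookup relies on a UserTable member that is not shown. A sketch of the assumed declaration, plus a hypothetical registration helper for pairing a recognition ID with a display name:

        // Assumed field: maps recognition user IDs to display names.
        private Dictionary<int, string> UserTable = new Dictionary<int, string>();

        // Hypothetical helper: after registering a face, remember the name
        // so the overlay can label it instead of showing 未註冊使用者.
        public void RememberUser(int userId, string name)
        {
            UserTable[userId] = name;
        }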
Example No. 19
        //---------------------------------------------------------ProcessingThread-----------------------------------------------------------------------------------------------



        private void ProcessingThread()
        {
            /* Start the AcquireFrame/ReleaseFrame loop.
             * (ADS) Loop description: check the return of 'AcquireFrame(true)' against 'pxcmStatus.PXCM_STATUS_NO_ERROR'.
             * Frame processing is paused between the 'AcquireFrame' call and the next 'ReleaseFrame' call.
             * 'AcquireFrame(true)' pauses frame processing, reads the current frame, and stores it where 'QuerySample()' can access it.
             * Frame processing is released further below by calling 'ReleaseFrame()'.
             */
            try
            {
                while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    /* Acquire the color image data:
                     * query the sample (the image saved by the 'AcquireFrame(true)' call)
                     * and assign it to the sample variable (raw data)
                     */
                    PXCMCapture.Sample sample = senseManager.QuerySample();

                    /* Declare a variable with a structure suited to receiving an image: here, the raw
                     * image acquired by 'AcquireFrame()' and converted by 'color.AcquireAccess()'
                     */
                    PXCMImage.ImageData colorData;

                    /* Convert the raw image held by the sample and store it in the ImageData
                     * structure through the last parameter of 'color.AcquireAccess(out colorData)'
                     */
                    sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);

                    /* Convert to Bitmap
                     */
                    Bitmap colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                    // Get face data
                    if (faceData != null)
                    {
                        faceData.Update();
                        numFacesDetected = faceData.QueryNumberOfDetectedFaces();

                        if (numFacesDetected > 0)
                        {
                            // Get the first face detected (index 0)
                            PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                            // Retrieve face location data
                            PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                            if (faceDetectionData != null)
                            {
                                PXCMRectI32 faceRectangle;
                                faceDetectionData.QueryBoundingRect(out faceRectangle);
                                faceRectangleHeight = faceRectangle.h;
                                faceRectangleWidth  = faceRectangle.w;
                                faceRectangleX      = faceRectangle.x;
                                faceRectangleY      = faceRectangle.y;

                                //int faceRectangleX2 = (faceRectangleX - 510) * -1;
                            }

                            // Process face recognition data
                            if (face != null)
                            {
                                // Retrieve the recognition data instance
                                recognitionData = face.QueryRecognition();

                                // Set the user ID and process register/unregister logic
                                if (recognitionData.IsRegistered())
                                {
                                    userId = Convert.ToString(recognitionData.QueryUserID());

                                    if (flagUserId != userId)
                                    {
                                        Actions.LoadUser(Convert.ToInt16(userId));
                                        flagUserId = userId;
                                    }

                                    if (doUnregister)
                                    {
                                        recognitionData.UnregisterUser();
                                        doUnregister = false;
                                    }
                                }
                                else
                                {
                                    if (doRegister)
                                    {
                                        recognitionData.RegisterUser();

                                        // Capture a jpg image of registered user
                                        colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

                                        doRegister = false;
                                    }
                                    else
                                    {
                                        userId = "Unrecognized";
                                    }
                                }
                            }
                        }
                        else
                        {
                            userId = "No users in view";
                        }
                    }

                    // Display the color stream and other UI elements
                    UpdateUI(colorBitmap);

                    // Release resources
                    colorBitmap.Dispose();
                    sample.color.ReleaseAccess(colorData);
                    sample.color.Dispose();

                    /* Release the frame:
                     * 'ReleaseFrame' releases the lock on the current frame; frame processing resumes.
                     */
                    senseManager.ReleaseFrame();

                    coords = faceRectangleX.ToString() + " " + faceRectangleY.ToString() + " " + faceRectangleWidth.ToString() + " " + faceRectangleHeight.ToString();
                    Server.sendMsg(255, "rect", coords, userId);
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Error in ProcessingThread: " + e.Message);
            }
        }
Example No. 20
        // Update the face frame
        private void updateFaceFrame()
        {
            // Get the frame data
            PXCMCapture.Sample sample = senceManager.QuerySample();
            UpdateColorImage(sample.color);

            // Update the face data from the SenseManager module
            faceData.Update();

            // Get the number of detected faces
            int numFaces = faceData.QueryNumberOfDetectedFaces();

            if (senceManager != null)
            {
                // Retrieve and draw information for each detected face
                for (int i = 0; i < numFaces; ++i)
                {
                    // Get the face information
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);

                    // Get the face position (obtained via Depth)
                    var detection = face.QueryDetection();
                    int face_x    = 0;
                    int face_y    = 0;

                    if (detection != null)
                    {
                        PXCMRectI32 faceRect;
                        detection.QueryBoundingRect(out faceRect);

                        // Added: store the face position for use below
                        face_x = faceRect.x;
                        face_y = faceRect.y;

                        // Move the rectangle to match the face position
                        TranslateTransform transform = new TranslateTransform(faceRect.x, faceRect.y);
                        rect[i].Width           = faceRect.w;
                        rect[i].Height          = faceRect.h;
                        rect[i].Stroke          = Brushes.Blue;
                        rect[i].StrokeThickness = 3;
                        rect[i].RenderTransform = transform;
                    }


                    //Get the face recognition result
                    rdata = face.QueryRecognition();

                    if (rdata != null && rdata.IsRegistered())
                    {
                        //Check whether a registered user ID was recognized
                        int uid = rdata.QueryUserID();
                        if (uid != -1)
                        {
                            faceID_tb[i].Text            = "Recognition:" + uid;
                            faceID_tb[i].RenderTransform = new TranslateTransform(face_x, face_y - 30);
                        }
                        else
                        {
                            faceID_tb[i].Text            = "Recognition:" + "NO";
                            faceID_tb[i].RenderTransform = new TranslateTransform(face_x, face_y - 30);
                        }
                    }
                }
            }
        }
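
Exemplo n.º 20 only reads recognition results, so rdata.IsRegistered() can never fire until a face has been registered somewhere. A plausible trigger is a key or button handler like the sketch below (hypothetical handler name, same faceData field as above).

        // Sketch: register the first detected face so that the
        // IsRegistered() branch in updateFaceFrame() can succeed.
        private void OnRegisterKeyPressed()
        {
            if (faceData == null || faceData.QueryNumberOfDetectedFaces() == 0)
            {
                return;
            }

            PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
            if (face == null)
            {
                return;
            }

            PXCMFaceData.RecognitionData rdata = face.QueryRecognition();
            if (rdata != null && !rdata.IsRegistered())
            {
                rdata.RegisterUser(); // assigns the user ID later returned by QueryUserID()
            }
        }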
Exemplo n.º 21
0
        //Update processing for the face frame
        private void updateFaceFrame()
        {
            if (senceManager != null)
            {
                // Get the frame data
                PXCMCapture.Sample sample = senceManager.QuerySample();
                UpdateColorImage(sample.color);

                //Update the face data from the SenseManager module
                faceData.Update();

                //Get the number of detected faces
                int numFaces = faceData.QueryNumberOfDetectedFaces();

                //Prepare containers for the face landmark (feature point) data
                PXCMFaceData.LandmarksData[] landmarkData = new PXCMFaceData.LandmarksData[LANDMARK_MAXFACES];
                PXCMFaceData.LandmarkPoint[] landmarkPoints;
                int numPoints = 0;

                //Retrieve information and draw for each detected face
                for (int i = 0; i < numFaces; ++i)
                {
                    //Get the face information
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);

                    // Get the face position (from the depth stream)
                    var detection = face.QueryDetection();
                    if (detection != null)
                    {
                        PXCMRectI32 faceRect;
                        detection.QueryBoundingRect(out faceRect);

                        //Move and resize the rectangle to match the face position
                        TranslateTransform transform = new TranslateTransform(faceRect.x, faceRect.y);
                        rect[i].Width           = faceRect.w;
                        rect[i].Height          = faceRect.h;
                        rect[i].Stroke          = Brushes.Blue;
                        rect[i].StrokeThickness = 3;
                        rect[i].RenderTransform = transform;

                        //Get the landmark (feature point) information from the face data
                        landmarkData[i] = face.QueryLandmarks();

                        if (landmarkData[i] != null)
                        {
                            //Check how many feature points the landmark data recognized
                            numPoints = landmarkData[i].QueryNumPoints();
                            //Allocate storage for the recognized feature points
                            landmarkPoints = new PXCMFaceData.LandmarkPoint[numPoints];
                            //Get the feature point positions from the landmark data and display them
                            if (landmarkData[i].QueryPoints(out landmarkPoints))
                            {
                                for (int j = 0; j < numPoints; j++)
                                {
                                    tb[i, j].Text            = j.ToString();
                                    tb[i, j].RenderTransform = new TranslateTransform(landmarkPoints[j].image.x, landmarkPoints[j].image.y);
                                }
                            }
                        }
                    }
                }
            }
        }
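
The loop above walks every landmark point. When only a single feature is needed, the QueryPointIndex/QueryPoint pair (used the same way in a later example in this collection) is more direct; a sketch with a hypothetical helper name:

        // Sketch: fetch one named landmark instead of iterating all points.
        private static bool TryGetLandmark(PXCMFaceData.LandmarksData landmarks,
                                           PXCMFaceData.LandmarkType type,
                                           out PXCMFaceData.LandmarkPoint point)
        {
            point = null;
            if (landmarks == null)
            {
                return false;
            }

            int index = landmarks.QueryPointIndex(type);
            return landmarks.QueryPoint(index, out point) && point != null;
        }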
Exemplo n.º 22
0
        private void ProcessingThread()
        {
            // Start AcquireFrame/ReleaseFrame loop
            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // Acquire the color image data
                PXCMCapture.Sample  sample = senseManager.QuerySample();
                Bitmap              colorBitmap;
                PXCMImage.ImageData colorData;
                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
                colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                // Get face data
                if (faceData != null)
                {
                    faceData.Update();
                    numFacesDetected = faceData.QueryNumberOfDetectedFaces();

                    if (numFacesDetected > 0)
                    {
                        // Get the first face detected (index 0)
                        PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                        // Retrieve face location data
                        PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                        if (faceDetectionData != null)
                        {
                            PXCMRectI32 faceRectangle;
                            faceDetectionData.QueryBoundingRect(out faceRectangle);
                            faceRectangleHeight = faceRectangle.h;
                            faceRectangleWidth  = faceRectangle.w;
                            faceRectangleX      = faceRectangle.x;
                            faceRectangleY      = faceRectangle.y;
                        }

                        // Process face recognition data
                        if (face != null)
                        {
                            // Retrieve the recognition data instance
                            recognitionData = face.QueryRecognition();

                            // Set the user ID and process register/unregister logic
                            if (recognitionData.IsRegistered())
                            {
                                userId = Convert.ToString(recognitionData.QueryUserID());

                                if (doUnregister)
                                {
                                    recognitionData.UnregisterUser();
                                    doUnregister = false;
                                }
                            }
                            else
                            {
                                if (doRegister)
                                {
                                    recognitionData.RegisterUser();

                                    // Capture a jpg image of registered user
                                    colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

                                    doRegister = false;
                                }
                                else
                                {
                                    userId = "Unrecognized";
                                }
                            }
                        }
                    }
                    else
                    {
                        userId = "No users in view";
                    }
                }

                // Display the color stream and other UI elements
                UpdateUI(colorBitmap);

                // Release resources
                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                sample.color.Dispose();

                // Release the frame
                senseManager.ReleaseFrame();
            }
        }
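
This AcquireFrame/ReleaseFrame skeleton recurs throughout the ProcessingThread samples in this collection. Stripped of the face logic it reduces to the sketch below (same senseManager field; the method name is illustrative).

        // Sketch: the bare frame loop shared by these samples. Per-frame
        // work goes where the comment marks; everything else is bookkeeping.
        private void FrameLoop()
        {
            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                PXCMCapture.Sample sample = senseManager.QuerySample();
                PXCMImage.ImageData colorData;
                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ,
                                           PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
                Bitmap colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                // ... per-frame processing (face data, UI updates) goes here ...

                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                sample.color.Dispose();
                senseManager.ReleaseFrame();
            }
        }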
Exemplo n.º 23
0
        private void CaptureProcess()
        {
            Sender = new UDPSender(CurrentIpAdress, Convert.ToInt32(currentPort));
            while (senseManager.AcquireFrame(true).IsSuccessful())
            {
                PXCMCapture.Sample  sample = senseManager.QuerySample();
                PXCMImage.ImageData colorImageData;
                Bitmap colorBitmap;

                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out colorImageData);
                colorBitmap = colorImageData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                if (faceModule != null)
                {
                    PXCMFaceData faceData = faceModule.CreateOutput();
                    faceData.Update();
                    numberTrackedFace = faceData.QueryNumberOfDetectedFaces();

                    PXCMFaceData.Face faceDataFace = faceData.QueryFaceByIndex(0);

                    if (faceDataFace != null)
                    {
                        PXCMFaceData.DetectionData faceDetectionData = faceDataFace.QueryDetection();
                        PXCMFaceData.LandmarksData landMarksData     = faceDataFace.QueryLandmarks();


                        if (faceDetectionData != null) // Record the face-location variables
                        {
                            PXCMRectI32 faceRectangle;
                            faceDetectionData.QueryFaceAverageDepth(out faceAverageDepth);
                            faceDetectionData.QueryBoundingRect(out faceRectangle);
                            faceRectangleHeight = faceRectangle.h;
                            faceRectangleWidth  = faceRectangle.w;
                            faceRectangleX      = faceRectangle.x;
                            faceRectangleY      = faceRectangle.y;

                            PXCMFaceData.LandmarkPoint[] points; // Landmark points to draw onto the face
                            if (landMarksData != null && landMarksData.QueryPoints(out points))
                            {

                                Graphics graphics = Graphics.FromImage(colorBitmap);
                                Font     font     = new Font(System.Drawing.FontFamily.GenericMonospace, 12, System.Drawing.FontStyle.Bold);

                                foreach (PXCMFaceData.LandmarkPoint landmark in points)
                                {
                                    point.X = landmark.image.x + LandMarkAlingment;
                                    point.Y = landmark.image.y + LandMarkAlingment;
                                    //Console.WriteLine(point.X);

                                    if (landmark.confidenceImage == 0)
                                    {
                                        graphics.DrawString("X", font, System.Drawing.Brushes.Brown, point);
                                    }
                                    else
                                    {
                                        graphics.DrawString("*", font, System.Drawing.Brushes.CornflowerBlue, point);
                                    }
                                    Connect = Math.Min(landmark.confidenceImage, 1);
                                }
                            }
                        }

                        var connectMessage = new SharpOSC.OscMessage("/expressions/connectMessage", Connect);
                        Sender.Send(connectMessage);

                        PXCMFaceData.PoseData facePoseData = faceDataFace.QueryPose(); // rotation variables for animating the head
                        if (facePoseData != null)
                        {
                            PXCMFaceData.PoseEulerAngles headAngles;
                            facePoseData.QueryPoseAngles(out headAngles);
                            headRoll  = headAngles.roll;
                            headYaw   = headAngles.yaw;
                            headPitch = headAngles.pitch;
                        }

                        PXCMFaceData.ExpressionsData expressionData = faceDataFace.QueryExpressions();

                        if (expressionData != null)
                        {
                            PXCMFaceData.ExpressionsData.FaceExpressionResult score;

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_KISS, out score);
                            Dispatcher.Invoke(() => kissExpression.Text = Convert.ToString(score.intensity));
                            var kissMessage = new SharpOSC.OscMessage("/expressions/kiss", score.intensity);
                            Sender.Send(kissMessage);

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out score);
                            Dispatcher.Invoke(() => mouthExpression.Text = Convert.ToString(score.intensity));
                            var mouthMessage = new SharpOSC.OscMessage("/expressions/mouth", score.intensity);
                            Sender.Send(mouthMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out score);
                            Dispatcher.Invoke(() => smileExpression.Text = Convert.ToString(score.intensity));
                            var smileMessage = new SharpOSC.OscMessage("/expressions/smile", score.intensity);
                            Sender.Send(smileMessage);

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT, out score);
                            Dispatcher.Invoke(() => tongueExpression.Text = Convert.ToString(score.intensity));
                            var tongueOutMessage = new SharpOSC.OscMessage("/expressions/tongueout", score.intensity);
                            Sender.Send(tongueOutMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_BROW_LOWERER_LEFT, out score);
                            Dispatcher.Invoke(() => leftBrowLowExpression.Text = Convert.ToString(score.intensity));
                            var leftBrowLowMessage = new SharpOSC.OscMessage("/expressions/leftBrowLow", score.intensity);
                            Sender.Send(leftBrowLowMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_BROW_LOWERER_RIGHT, out score);
                            Dispatcher.Invoke(() => rightBrowLowExpression.Text = Convert.ToString(score.intensity));
                            var rightBrowLowMessage = new SharpOSC.OscMessage("/expressions/rightBrowLow", score.intensity);
                            Sender.Send(rightBrowLowMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_BROW_RAISER_LEFT, out score);
                            Dispatcher.Invoke(() => leftBrowRaiseExpression.Text = Convert.ToString(score.intensity));
                            var leftBrowRaiseMessage = new SharpOSC.OscMessage("/expressions/leftBrowRaise", score.intensity);
                            Sender.Send(leftBrowRaiseMessage);

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_BROW_RAISER_RIGHT, out score);
                            Dispatcher.Invoke(() => rightBrowRaiseExpression.Text = Convert.ToString(score.intensity));
                            var rightBrowRaiseMessage = new SharpOSC.OscMessage("/expressions/rightBrowRaise", score.intensity);
                            Sender.Send(rightBrowRaiseMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_LEFT, out score);
                            Dispatcher.Invoke(() => leftEyeClosedExpression.Text = Convert.ToString(score.intensity));
                            var leftEyeClosedMessage = new SharpOSC.OscMessage("/expressions/leftEyeClosed", score.intensity);
                            Sender.Send(leftEyeClosedMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_RIGHT, out score);
                            Dispatcher.Invoke(() => rightEyeClosedExpression.Text = Convert.ToString(score.intensity));
                            var rightEyeClosedMessage = new SharpOSC.OscMessage("/expressions/rightEyeClosed", score.intensity);
                            Sender.Send(rightEyeClosedMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_TURN_LEFT, out score);
                            Dispatcher.Invoke(() => eyesTurnLeftExpression.Text = Convert.ToString(score.intensity));
                            var eyesTurnLeftMessage = new SharpOSC.OscMessage("/expressions/eyesTurnLeft", score.intensity);
                            Sender.Send(eyesTurnLeftMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_TURN_RIGHT, out score);
                            Dispatcher.Invoke(() => eyesTurnRightExpression.Text = Convert.ToString(score.intensity));
                            var eyesTurnRightMessage = new SharpOSC.OscMessage("/expressions/eyesTurnRight", score.intensity);
                            Sender.Send(eyesTurnRightMessage);



                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_DOWN, out score);
                            Dispatcher.Invoke(() => eyesDownExpression.Text = Convert.ToString(score.intensity));
                            var eyesDownMessage = new SharpOSC.OscMessage("/expressions/eyesDown", score.intensity);
                            Sender.Send(eyesDownMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_UP, out score);
                            Dispatcher.Invoke(() => eyesUpExpression.Text = Convert.ToString(score.intensity));
                            var eyesUpMessage = new SharpOSC.OscMessage("/expressions/eyesUp", score.intensity);
                            Sender.Send(eyesUpMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_PUFF_LEFT, out score);
                            Dispatcher.Invoke(() => puffLeftExpression.Text = Convert.ToString(score.intensity));
                            var leftPuffMessage = new SharpOSC.OscMessage("/expressions/leftPuff", score.intensity);
                            Sender.Send(leftPuffMessage);



                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_PUFF_RIGHT, out score);
                            Dispatcher.Invoke(() => puffRightExpression.Text = Convert.ToString(score.intensity));
                            var rightPuffMessage = new SharpOSC.OscMessage("/expressions/rightPuff", score.intensity);
                            Sender.Send(rightPuffMessage);
                        }
                    }
                    faceData.Dispose();
                }
                UpdateUI(colorBitmap);

                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorImageData);
                senseManager.ReleaseFrame();
            }
        }
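
The fourteen near-identical stanzas above differ only in the expression enum, the TextBlock being updated, and the OSC address. A table-driven loop removes the repetition; this is a sketch assuming the same Sender field and a using for System.Collections.Generic (the per-expression UI updates are omitted for brevity).

        // Sketch: query each expression and forward its intensity over OSC.
        // Addresses match the ones used above; nothing is sent when an
        // expression result is unavailable for the current frame.
        private void SendExpressions(PXCMFaceData.ExpressionsData expressionData)
        {
            var channels = new Dictionary<PXCMFaceData.ExpressionsData.FaceExpression, string>
            {
                { PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_KISS,       "/expressions/kiss" },
                { PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, "/expressions/mouth" },
                { PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE,      "/expressions/smile" },
                { PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT, "/expressions/tongueout" },
                // ... remaining expressions follow the same pattern ...
            };

            foreach (var channel in channels)
            {
                PXCMFaceData.ExpressionsData.FaceExpressionResult score;
                if (expressionData.QueryExpression(channel.Key, out score))
                {
                    Sender.Send(new SharpOSC.OscMessage(channel.Value, score.intensity));
                }
            }
        }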
Exemplo n.º 24
0
        public override bool Process(Trigger trigger)
        {
            trigger.ErrorDetected = false;

            if (!SenseToolkitManager.Instance.IsSenseOptionSet(SenseOption.SenseOptionID.Face))
            {
                trigger.ErrorDetected = true;
                return(false);
            }

            if (!(trigger is TrackTrigger))
            {
                trigger.ErrorDetected = true;
                return(false);
            }

            // make sure we have valid values
            if (RealWorldBoxDimensions.x <= 0)
            {
                RealWorldBoxDimensions.x = 1;
            }

            if (RealWorldBoxDimensions.y <= 0)
            {
                RealWorldBoxDimensions.y = 1;
            }

            if (RealWorldBoxDimensions.z <= 0)
            {
                RealWorldBoxDimensions.z = 1;
            }

            if (SenseToolkitManager.Instance.Initialized
                &&
                SenseToolkitManager.Instance.FaceModuleOutput != null)
            {
                if (SenseToolkitManager.Instance.FaceModuleOutput.QueryNumberOfDetectedFaces() == 0)
                {
                    ((TrackTrigger)trigger).Position = Vector3.zero;
                    return(false);
                }

                PXCMFaceData.Face singleFaceOutput = null;

                singleFaceOutput = SenseToolkitManager.Instance.FaceModuleOutput.QueryFaceByIndex(FaceIndex);

                bool success = false;
                if (singleFaceOutput != null && singleFaceOutput.QueryUserID() >= 0)
                {
                    // Process Tracking
                    if (trigger is TrackTrigger)
                    {
                        TrackTrigger specificTrigger = (TrackTrigger)trigger;

                        var  landmarksData = singleFaceOutput.QueryLandmarks();
                        bool hasLandmarks  = false;

                        if (landmarksData != null)
                        {
                            PXCMFaceData.LandmarkPoint outpt = null;
                            bool hasPoint = landmarksData.QueryPoint(landmarksData.QueryPointIndex(LandmarkToTrack), out outpt);
                            if (hasPoint)
                            {
                                hasLandmarks = outpt.confidenceWorld != 0;
                            }
                        }

                        if (!hasLandmarks && useBoundingBox)
                        {
                            PXCMRectI32 rect = new PXCMRectI32();
                            if (singleFaceOutput.QueryDetection() != null && singleFaceOutput.QueryDetection().QueryBoundingRect(out rect))
                            {
                                float depth;
                                singleFaceOutput.QueryDetection().QueryFaceAverageDepth(out depth);
                                float bbCenterX = (rect.x + rect.w / 2);
                                float bbCenterY = (rect.y + rect.h / 2);

                                Vector3 vec = new Vector3();

                                if (_pos_ijz == null)
                                {
                                    _pos_ijz = new PXCMPoint3DF32[1] {
                                        new PXCMPoint3DF32()
                                    };
                                }
                                _pos_ijz[0].x = bbCenterX;
                                _pos_ijz[0].y = bbCenterY;
                                _pos_ijz[0].z = depth;

                                if (_pos3d == null)
                                {
                                    _pos3d = new PXCMPoint3DF32[1] {
                                        new PXCMPoint3DF32()
                                    };
                                }

                                SenseToolkitManager.Instance.Projection.ProjectDepthToCamera(_pos_ijz, _pos3d);

                                vec.x = _pos3d[0].x / 10f;
                                vec.y = _pos3d[0].y / 10f;
                                vec.z = _pos3d[0].z / 10f;

                                // Clamp and normalize to the Real World Box
                                TrackingUtilityClass.ClampToRealWorldInputBox(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);
                                TrackingUtilityClass.Normalize(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);

                                if (!float.IsNaN(vec.x) && !float.IsNaN(vec.y) && !float.IsNaN(vec.z))
                                {
                                    specificTrigger.Position = vec;
                                    return(true);
                                }
                            }
                            else
                            {
                                specificTrigger.Position = Vector3.zero;
                                return(false);
                            }
                        }
                        else if (landmarksData == null && !useBoundingBox)
                        {
                            specificTrigger.Position = Vector3.zero;
                            return(false);
                        }
                        else
                        {
                            int landmarkId = landmarksData.QueryPointIndex(LandmarkToTrack);

                            PXCMFaceData.LandmarkPoint point = null;

                            landmarksData.QueryPoint(landmarkId, out point);

                            // Translation
                            if (point != null)
                            {
                                Vector3 vec = new Vector3();
                                vec.x = -point.world.x * 100f;
                                vec.y = point.world.y * 100f;
                                vec.z = point.world.z * 100f;

                                if (vec.x + vec.y + vec.z == 0)
                                {
                                    specificTrigger.Position = Vector3.zero;
                                    return(false);
                                }

                                // Clamp and normalize to the Real World Box
                                TrackingUtilityClass.ClampToRealWorldInputBox(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);
                                TrackingUtilityClass.Normalize(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);

                                if (!float.IsNaN(vec.x) && !float.IsNaN(vec.y) && !float.IsNaN(vec.z))
                                {
                                    specificTrigger.Position = vec;
                                    success = true;
                                }
                            }

                            //Rotation
                            PXCMFaceData.PoseData poseData = singleFaceOutput.QueryPose();
                            if (success && poseData != null)
                            {
                                PXCMFaceData.PoseEulerAngles angles;
                                if (poseData.QueryPoseAngles(out angles))
                                {
                                    if (!float.IsNaN(angles.pitch) && !float.IsNaN(angles.yaw) && !float.IsNaN(angles.roll))
                                    {
                                        Quaternion q = Quaternion.Euler(-angles.pitch, angles.yaw, -angles.roll);

                                        specificTrigger.RotationQuaternion = q;

                                        return(true);
                                    }
                                }
                            }
                        }
                    }
                }
            }
            return(false);
        }
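
The bounding-box branch above converts a depth pixel plus the face's average depth into camera space through PXCMProjection.ProjectDepthToCamera. Isolated, the conversion looks like the sketch below (hypothetical helper name; the division by 10 mirrors the sample's scaling of the SDK's millimetre output).

        // Sketch: project an (x, y) depth-image pixel with depth z into a
        // camera-space Vector3, scaled the same way as the code above.
        private static Vector3 DepthPixelToCamera(PXCMProjection projection, float x, float y, float depth)
        {
            PXCMPoint3DF32[] posIjz = { new PXCMPoint3DF32 { x = x, y = y, z = depth } };
            PXCMPoint3DF32[] pos3d  = { new PXCMPoint3DF32() };

            projection.ProjectDepthToCamera(posIjz, pos3d);

            return new Vector3(pos3d[0].x / 10f, pos3d[0].y / 10f, pos3d[0].z / 10f);
        }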
Exemplo n.º 25
0
        private void Update()
        {
            // Start AcquireFrame-ReleaseFrame loop
            while (sm.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // Acquire color image data
                PXCMCapture.Sample  sample = sm.QuerySample();
                Bitmap              colorBitmap;
                PXCMImage.ImageData colorData;
                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out colorData);
                colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                // Create an instance of MyTrackedPerson
                MyTrackedPerson myTrackedPerson = new MyTrackedPerson();
                MyBlobs         myBlobs         = new MyBlobs();

                // Acquire person tracking data
                personData = personModule.QueryOutput();
                myTrackedPerson.PersonsDetected = personData.QueryNumberOfPeople();

                if (myTrackedPerson.PersonsDetected == 1)
                {
                    PXCMPersonTrackingData.Person         trackedPerson     = personData.QueryPersonData(PXCMPersonTrackingData.AccessOrderType.ACCESS_ORDER_BY_ID, 0);
                    PXCMPersonTrackingData.PersonTracking trackedPersonData = trackedPerson.QueryTracking();
                    PXCMPersonTrackingData.BoundingBox2D  personBox         = trackedPersonData.Query2DBoundingBox();
                    myTrackedPerson.X = personBox.rect.x;
                    myTrackedPerson.Y = personBox.rect.y;
                    myTrackedPerson.H = personBox.rect.h;
                    myTrackedPerson.W = personBox.rect.w;

                    /*
                     * PXCMPersonTrackingData.PersonJoints personJoints = trackedPerson.QuerySkeletonJoints();
                     * PXCMPersonTrackingData.PersonJoints.SkeletonPoint[] skeletonPoints = new PXCMPersonTrackingData.PersonJoints.SkeletonPoint[personJoints.QueryNumJoints()];
                     * trackedPerson.QuerySkeletonJoints().QueryJoints(skeletonPoints);
                     * if (skeletonPoints.Length > 0)
                     *  skeletonPoints[0].GetType();
                     */
                }

                // Acquire face tracking data
                faceData.Update();
                myTrackedPerson.FacesDetected = faceData.QueryNumberOfDetectedFaces();

                if (myTrackedPerson.FacesDetected == 1)
                {
                    PXCMFaceData.Face          face = faceData.QueryFaceByIndex(0);
                    PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                    PXCMRectI32 faceRectangle;
                    faceDetectionData.QueryBoundingRect(out faceRectangle);
                    myTrackedPerson.FaceH = faceRectangle.h;
                    myTrackedPerson.FaceW = faceRectangle.w;
                    myTrackedPerson.FaceX = faceRectangle.x;
                    myTrackedPerson.FaceY = faceRectangle.y;
                    float faceDepth;
                    faceDetectionData.QueryFaceAverageDepth(out faceDepth);
                    myTrackedPerson.FaceDepth = faceDepth;
                }

                blobData.Update();
                int numBlobs = blobData.QueryNumberOfBlobs();
                myBlobs.numBlobs      = numBlobs;
                myBlobs.blobs         = new List <List <PXCMPointI32> >(numBlobs);
                myBlobs.closestPoints = new List <PXCMPoint3DF32>(4);
                for (int i = 0; i < numBlobs; i++)
                {
                    PXCMBlobData.IBlob blob;
                    pxcmStatus         result1 = blobData.QueryBlob(i, PXCMBlobData.SegmentationImageType.SEGMENTATION_IMAGE_DEPTH, PXCMBlobData.AccessOrderType.ACCESS_ORDER_NEAR_TO_FAR, out blob);
                    if (result1 == pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        PXCMPoint3DF32 closestPoint = blob.QueryExtremityPoint(PXCMBlobData.ExtremityType.EXTREMITY_CLOSEST);
                        myBlobs.closestPoints.Add(closestPoint);

                        int numContours = blob.QueryNumberOfContours();
                        if (numContours > 0)
                        {
                            // only deal with outer contour
                            for (int j = 0; j < numContours; j++)
                            {
                                PXCMBlobData.IContour contour;
                                pxcmStatus            result2 = blob.QueryContour(j, out contour);
                                if (result2 == pxcmStatus.PXCM_STATUS_NO_ERROR)
                                {
                                    if (contour.IsOuter())
                                    {
                                        PXCMPointI32[] points;
                                        pxcmStatus     result3 = contour.QueryPoints(out points);
                                        if (result3 == pxcmStatus.PXCM_STATUS_NO_ERROR)
                                        {
                                            myBlobs.blobs.Add(points.ToList <PXCMPointI32>());
                                        }
                                    }
                                }
                            }
                        }
                    }
                }

                // Update UI
                Render(colorBitmap, myTrackedPerson, myBlobs);

                // serialize to json and send all clients

                var personJson = JsonConvert.SerializeObject(myTrackedPerson);
                personSockets.ToList().ForEach(s => s.Send(personJson));

                var blobJson = JsonConvert.SerializeObject(myBlobs);
                blobSockets.ToList().ForEach(s => s.Send(blobJson));

                // deserialize json as follows
                //MyTrackedPerson deserializedProduct = JsonConvert.DeserializeObject<MyTrackedPerson>(json);

                // Release resources
                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                sm.ReleaseFrame();
            }
        }
Exemplo n.º 26
0
        private void ProcessingThread()
        {
            // Start AcquireFrame/ReleaseFrame loop
            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // Acquire the color image data
                PXCMCapture.Sample  sample = senseManager.QuerySample();
                Bitmap              colorBitmap;
                PXCMImage.ImageData colorData;
                int            topScore   = 0;
                FaceExpression expression = FaceExpression.None;

                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
                colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                try
                {
                    IBarcodeReader reader = new BarcodeReader();
                    // Detect and decode any barcode inside the color bitmap
                    var result = reader.Decode(colorBitmap);
                    if (result != null)
                    {
                        MessageBox.Show(result.BarcodeFormat.ToString());
                        MessageBox.Show(result.Text);
                    }
                }
                catch (Exception)
                {
                    // Ignore barcode decoding failures for this frame
                }

                // Get face data
                if (faceData != null)
                {
                    faceData.Update();
                    numFacesDetected = faceData.QueryNumberOfDetectedFaces();

                    if (numFacesDetected > 0)
                    {
                        // Get the first face detected (index 0)
                        PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                        // Retrieve face location data
                        PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                        if (faceDetectionData != null)
                        {
                            PXCMRectI32 faceRectangle;
                            faceDetectionData.QueryBoundingRect(out faceRectangle);
                            if ((faceRectangle.h > 90) || (faceRectangle.w > 90))
                            {
                                faceRectangleHeight = faceRectangle.h * 3 / 2;
                                faceRectangleWidth  = faceRectangle.w * 3 / 2;
                            }
                            else if (((faceRectangle.h < 90) || (faceRectangle.w < 90)) && ((faceRectangle.h > 70) || (faceRectangle.w > 70)))
                            {
                                faceRectangleHeight = faceRectangle.h * 2;
                                faceRectangleWidth  = faceRectangle.w * 2;
                            }
                            else
                            {
                                faceRectangleHeight = faceRectangle.h * 5 / 2;
                                faceRectangleWidth  = faceRectangle.w * 5 / 2;
                            }
                            faceRectangleX = faceRectangle.x;
                            faceRectangleY = faceRectangle.y;
                        }

                        // Retrieve pose estimation data
                        PXCMFaceData.PoseData facePoseData = face.QueryPose();
                        if (facePoseData != null)
                        {
                            PXCMFaceData.PoseEulerAngles headAngles;
                            facePoseData.QueryPoseAngles(out headAngles);
                            headRoll  = headAngles.roll;
                            headPitch = headAngles.pitch;
                            headYaw   = headAngles.yaw;
                        }

                        // Retrieve expression data
                        PXCMFaceData.ExpressionsData expressionData = face.QueryExpressions();

                        if (expressionData != null)
                        {
                            PXCMFaceData.ExpressionsData.FaceExpressionResult score;

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_KISS, out score);
                            expressionScore[Convert.ToInt32(FaceExpression.Kiss)] = score.intensity;

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out score);
                            expressionScore[Convert.ToInt32(FaceExpression.Open)] = score.intensity;

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out score);
                            expressionScore[Convert.ToInt32(FaceExpression.Smile)] = score.intensity;

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT, out score);
                            expressionScore[Convert.ToInt32(FaceExpression.Tongue)] = score.intensity;

                            // Determine the highest scoring expression
                            for (int i = 1; i < TotalExpressions; i++)
                            {
                                if (expressionScore[i] > topScore)
                                {
                                    topScore   = expressionScore[i];
                                    expression = (FaceExpression)i;
                                }
                            }
                        }

                        // Process face recognition data
                        if (face != null)
                        {
                            // Retrieve the recognition data instance
                            recognitionData = face.QueryRecognition();

                            // Set the user ID and process register/unregister logic
                            if (recognitionData.IsRegistered())
                            {
                                userId = Convert.ToString(recognitionData.QueryUserID());

                                if (doUnregister)
                                {
                                    recognitionData.UnregisterUser();
                                    SaveDatabaseToFile();
                                    doUnregister = false;
                                    if (_persistentDict.ContainsKey(userId) == true)
                                    {
                                        _persistentDict.Remove(userId);
                                    }
                                }
                            }
                            else
                            {
                                if (doRegister)
                                {
                                    int uId = recognitionData.RegisterUser();
                                    SaveDatabaseToFile();

                                    if (newUserName != "")
                                    {
                                        if (_persistentDict.ContainsKey(uId.ToString()) == false)
                                        {
                                            _persistentDict.Add(uId.ToString(), newUserName);
                                            _persistentDict.Flush();
                                            newUserName = "";
                                        }
                                    }

                                    // Capture a jpg image of registered user
                                    colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

                                    doRegister = false;
                                }
                                else
                                {
                                    userId = "New User";
                                }
                            }
                        }
                    }
                    else
                    {
                        userId = "No users in view";
                    }
                }

                //hand = senseManager.QueryHand();

                //if (hand != null)
                //{

                //    // Retrieve the most recent processed data
                //    handData = hand.CreateOutput();
                //    handData.Update();

                //    // Get number of tracked hands
                //    nhands = handData.QueryNumberOfHands();

                //    if (nhands > 0)
                //    {
                //        // Retrieve hand identifier
                //        handData.QueryHandId(PXCMHandData.AccessOrderType.ACCESS_ORDER_BY_TIME, 0, out handId);

                //        // Retrieve hand data
                //        handData.QueryHandDataById(handId, out ihand);

                //        PXCMHandData.BodySideType bodySideType = ihand.QueryBodySide();
                //        if (bodySideType == PXCMHandData.BodySideType.BODY_SIDE_LEFT)
                //        {
                //            leftHand = true;
                //        }
                //        else if (bodySideType == PXCMHandData.BodySideType.BODY_SIDE_RIGHT)
                //        {
                //            leftHand = false;
                //        }



                //        // Retrieve all hand joint data
                //        for (int i = 0; i < nhands; i++)
                //        {
                //            for (int j = 0; j < 0x20; j++)
                //            {
                //                PXCMHandData.JointData jointData;
                //                ihand.QueryTrackedJoint((PXCMHandData.JointType)j, out jointData);
                //                nodes[i][j] = jointData;
                //            }
                //        }

                //        // Get world coordinates for tip of middle finger on the first hand in camera range
                //        handTipX = nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].positionWorld.x;
                //        handTipY = nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].positionWorld.y;
                //        handTipZ = nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].positionWorld.z;


                //        swipehandTipX = nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].positionImage.x;
                //        swipehandTipY = nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].positionImage.y;
                //        swipehandTipZ = nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].positionImage.z;

                //        //Console.Out.WriteLine("Before x={0}", swipehandTipX);
                //        //Console.Out.WriteLine("Before speed={0}", nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].speed.x);

                //        // Retrieve gesture data
                //        if (handData.IsGestureFired("spreadfingers", out gestureData)) { gesture = Gesture.FingerSpread; }
                //        else if (handData.IsGestureFired("two_fingers_pinch_open", out gestureData)) { gesture = Gesture.Pinch; }
                //        else if (handData.IsGestureFired("wave", out gestureData)) { gesture = Gesture.Wave; }
                //        else if (handData.IsGestureFired("swipe_left", out gestureData)) { gesture = Gesture.SwipeLeft; }
                //        else if (handData.IsGestureFired("swipe_right", out gestureData)) { gesture = Gesture.SwipeRight; }
                //        else if (handData.IsGestureFired("fist", out gestureData)) { gesture = Gesture.Fist; }
                //        else if (handData.IsGestureFired("thumb_up", out gestureData)) { gesture = Gesture.Thumb; }

                //    }
                //    else
                //    {
                //        gesture = Gesture.Undefined;
                //    }

                //    //UpdateUI();
                //    if (handData != null) handData.Dispose();
                //}

                // Display the color stream and other UI elements
                //UpdateUI(colorBitmap, expression, gesture);


                UpdateUI(colorBitmap, expression);

                // Release resources
                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                sample.color.Dispose();

                // Release the frame
                senseManager.ReleaseFrame();
            }
        }
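
The three-way enlargement of the detection rectangle above is easier to audit as a single function over the rectangle's larger dimension. The sketch below approximates the original mixed ||/&& conditions with clean tiers (hypothetical helper name).

        // Sketch: the scale factor applied to the detection rectangle,
        // approximating the tiered conditions in the code above.
        private static double RectangleScale(PXCMRectI32 rect)
        {
            int size = Math.Max(rect.h, rect.w);
            if (size > 90) return 1.5; // * 3 / 2
            if (size > 70) return 2.0; // * 2
            return 2.5;                // * 5 / 2
        }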
Exemplo n.º 27
0
        private void ProcessingThread()
        {
            // Start AcquireFrame/ReleaseFrame loop
            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // Acquire the color image data
                PXCMCapture.Sample  sample = senseManager.QuerySample();
                Bitmap              colorBitmap;
                PXCMImage.ImageData colorData;
                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
                colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                // Get face data
                if (faceData != null)
                {
                    faceData.Update();
                    numFacesDetected = faceData.QueryNumberOfDetectedFaces();

                    if (numFacesDetected > 0)
                    {
                        // Get the first face detected (index 0)
                        PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
                        PXCMFaceData.PoseData poseData = face.QueryPose();
                        // F200: HeadPosition includes a confidence value alongside the position
                        PXCMFaceData.HeadPosition outHeadPosition = new PXCMFaceData.HeadPosition();
                        PXCMFaceData.PoseEulerAngles outPoseEulerAngles = new PXCMFaceData.PoseEulerAngles();

                        // Query the head pose data for the head center position and angles
                        if (poseData != null)
                        {
                            poseData.QueryHeadPosition(out outHeadPosition);
                            poseData.QueryPoseAngles(out outPoseEulerAngles);
                        }

                        System.Windows.Media.Media3D.Point3DCollection points = new System.Windows.Media.Media3D.Point3DCollection();
                        points.Add(new System.Windows.Media.Media3D.Point3D(outHeadPosition.headCenter.x,
                                                                            outHeadPosition.headCenter.y, outHeadPosition.headCenter.z));

                        Console.WriteLine("head center position: " + points[0]);
                        roll  = (int)outPoseEulerAngles.roll;
                        pitch = (int)outPoseEulerAngles.pitch;
                        yaw   = (int)outPoseEulerAngles.yaw;
                        // Map the Euler angles onto head-state flags
                        headUp        = pitch + 12 > 10;
                        headDown      = pitch < -10;
                        headTiltLeft  = roll > 5;
                        headTiltRight = roll < -5;
                        headTurnLeft  = yaw > 5;
                        headTurnRight = yaw < -5;

                        //Console.WriteLine("Rotation: " + outPoseEulerAngles.roll + " " + outPoseEulerAngles.pitch + " " + outPoseEulerAngles.yaw);
                        PXCMFaceData.ExpressionsData edata = face.QueryExpressions();
                        // retrieve the expression information
                        PXCMFaceData.ExpressionsData.FaceExpressionResult smileScore;
                        PXCMFaceData.ExpressionsData.FaceExpressionResult eyesUpScore;
                        PXCMFaceData.ExpressionsData.FaceExpressionResult eyesDownScore;
                        PXCMFaceData.ExpressionsData.FaceExpressionResult eyesTurnLeftScore;
                        PXCMFaceData.ExpressionsData.FaceExpressionResult eyesTurnRightScore;
                        PXCMFaceData.ExpressionsData.FaceExpressionResult headTiltedLeftScore;
                        PXCMFaceData.ExpressionsData.FaceExpressionResult headTurnedLeftScore;
                        // PXCMFaceData.ExpressionsData.FaceExpressionResult headUpScore;
                        //PXCMFaceData.ExpressionsData.FaceExpressionResult headDownScore;
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out smileScore);
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_UP, out eyesUpScore);
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_DOWN, out eyesDownScore);
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_TURN_LEFT, out eyesTurnLeftScore);
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_TURN_RIGHT, out eyesTurnRightScore);
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_TILT_LEFT, out headTiltedLeftScore);
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_TURN_LEFT, out headTurnedLeftScore);
                        // edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_UP, out headUpScore);
                        //edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_DOWN, out headDownScore);
                        smile  = smileScore.intensity;
                        eyesUp    = eyesUpScore.intensity;
                        eyeIsUp   = (eyesUp == 100);
                        eyesDown  = eyesDownScore.intensity;
                        eyeIsDown = (eyesDown == 100);

                        eyesTurnLeft  = eyesTurnLeftScore.intensity;
                        eyesTurnRight = eyesTurnRightScore.intensity;
                        //  headTiltLeft = headTiltedLeftScore.intensity;
                        // headTurnLeft= headTurnedLeftScore.intensity;
                        // headUp = headUpScore.intensity;
                        //headDown = headDownScore.intensity;
                        PXCMCapture.Device device = senseManager.captureManager.device;
                        device.SetIVCAMAccuracy(PXCMCapture.Device.IVCAMAccuracy.IVCAM_ACCURACY_FINEST);
                        // eyeIsUP= CheckFaceExpression(edata, FaceExpression.EXPRESSION_EYES_UP, 15);
                        if ((headTiltLeft || headTurnLeft) && headUp && (eyesTurnLeft == 100) && !eyeIsDown)
                        {
                            looksForward = true;
                        }
                        else if ((headTiltRight || headTurnRight) && headUp && (eyesTurnRight == 100) && !eyeIsDown)
                        {
                            looksForward = true;
                        }

                        /* else if (headTiltRight && (headDown || headUp) && !headTurnRight && (eyesTurnRight == 100))
                         *   looksForward = true;
                         * else if (headTiltLeft && (headDown || headUp) && !headTurnLeft && (eyesTurnLeft == 100))
                         *   looksForward = true;
                         */
                        else
                        {
                            looksForward = eyeIsUp;
                        }
                        //  headTiltLeftThreshold = CheckFaceExpression(edata, FaceExpression.EXPRESSION_HEAD_TILT_LEFT, 15);

                        // Optional CSV logging of pose angles (disabled):
                        //   var csv = new StringBuilder();
                        //   var newLine = string.Format("{0},{1},{2},{3},{4}{5}", DateTime.Now.ToString("dd-MM-yyyy-hh:mm:ss:fff"), roll, pitch, yaw, eyesUp, Environment.NewLine);
                        //   csv.Append(newLine);
                        //   string pathString = System.IO.Path.Combine(filePath, fileName);
                        //   File.AppendAllText(pathString, csv.ToString());

                        // Retrieve face location data
                        PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                        if (faceDetectionData != null)
                        {
                            PXCMRectI32 faceRectangle;
                            faceDetectionData.QueryBoundingRect(out faceRectangle);
                            faceRectangleHeight = faceRectangle.h;
                            faceRectangleWidth  = faceRectangle.w;
                            faceRectangleX      = faceRectangle.x;
                            faceRectangleY      = faceRectangle.y;
                        }


                        // Process face recognition data; face is already known to be
                        // non-null here, so guard the recognition module instead
                        recognitionData = face.QueryRecognition();
                        if (recognitionData != null)
                        {
                            // Set the user ID and process register/unregister logic
                            if (recognitionData.IsRegistered())
                            {
                                userId = Convert.ToString(recognitionData.QueryUserID());

                                if (doUnregister)
                                {
                                    recognitionData.UnregisterUser();
                                    doUnregister = false;
                                }
                            }
                            else
                            {
                                if (doRegister)
                                {
                                    recognitionData.RegisterUser();

                                    // Capture a jpg image of registered user
                                    colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

                                    doRegister = false;
                                }
                                else
                                {
                                    userId = "Unrecognized";
                                }
                            }
                        }
                    }
                    else
                    {
                        userId = "No users in view";
                    }
                }

                // Display the color stream and other UI elements
                UpdateUI(colorBitmap);

                // Release resources
                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                sample.color.Dispose();

                // Release the frame
                senseManager.ReleaseFrame();
            }
        }
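
The register/unregister flow above assumes the face module's recognition feature was enabled during setup, which this excerpt does not show. A minimal configuration sketch under that assumption (identifiers such as faceModule and config are illustrative, not taken from the original):

        // Hedged setup sketch: enable detection and recognition before the
        // processing loop starts; names here are illustrative.
        PXCMFaceModule faceModule = senseManager.QueryFace();
        PXCMFaceConfiguration config = faceModule.CreateActiveConfiguration();
        config.detection.isEnabled = true;   // needed for QueryDetection() above
        config.QueryRecognition().Enable();  // needed for QueryRecognition() above
        config.ApplyChanges();
        config.Dispose();
        faceData = faceModule.CreateOutput();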
Exemplo n.º 28
0
        private void updateFaceFrame()
        {
            PXCMCapture.Sample sample = this.senseManager.QuerySample();
            this.image = sample.color;

            if (this.image != null)
            {
                this.invalidate = true;
            }

            this.faceData.Update();

            // get number of faces
            FOutFaceLandmarkPoints.SliceCount    = 0;
            FOutFaceExpressionsResult.SliceCount = 0;
            int numFaces = this.faceData.QueryNumberOfDetectedFaces();

            for (int i = 0; i < numFaces; ++i)
            {
                // get faces info
                PXCMFaceData.Face face = this.faceData.QueryFaceByIndex(i);

                // get face position from depth-based detection
                var detection = face.QueryDetection();
                if (detection != null)
                {
                    // detection
                    PXCMRectI32 faceRect;
                    detection.QueryBoundingRect(out faceRect);
                    int sliceCount = i + 1;
                    FOutFacePosition.SliceCount = sliceCount;
                    FOutFacePosition[i]         = new Vector2D(faceRect.x, faceRect.y);
                    FOutFaceWidth.SliceCount    = sliceCount;
                    FOutFaceWidth[i]            = faceRect.w;
                    FOutFaceHeight.SliceCount   = sliceCount;
                    FOutFaceHeight[i]           = faceRect.h;

                    // pose (available in depth mode only)
                    PXCMFaceData.PoseData pose = face.QueryPose();
                    if (pose != null)
                    {
                        // face Euler angles
                        PXCMFaceData.PoseEulerAngles poseAngle = new PXCMFaceData.PoseEulerAngles();
                        pose.QueryPoseAngles(out poseAngle);
                        FOutFacePose.SliceCount = sliceCount;
                        FOutFacePose[i]         = new Vector3D(poseAngle.pitch, poseAngle.yaw, poseAngle.roll);
                    }

                    // landmarks
                    PXCMFaceData.LandmarksData landmarks = face.QueryLandmarks();
                    FOutFaceLandmarkBinSize.SliceCount = sliceCount;
                    if (landmarks != null)
                    {
                        // number of feature points from landmarks
                        int numPoints = landmarks.QueryNumPoints();
                        FOutFaceLandmarkBinSize[i] = numPoints;

                        PXCMFaceData.LandmarkPoint[] landmarkPoints = new PXCMFaceData.LandmarkPoint[numPoints];
                        int prevSliceCount = FOutFaceLandmarkPoints.SliceCount;
                        FOutFaceLandmarkPoints.SliceCount = prevSliceCount + numPoints;

                        if (landmarks.QueryPoints(out landmarkPoints))
                        {
                            for (int j = 0; j < numPoints; j++)
                            {
                                int index = prevSliceCount + j;
                                FOutFaceLandmarkPoints[index] = new Vector2D(landmarkPoints[j].image.x, landmarkPoints[j].image.y);
                            }
                        }
                    }
                    else
                    {
                        // No landmarks for this face: record an empty bin without
                        // discarding points already collected for earlier faces
                        FOutFaceLandmarkBinSize[i] = 0;
                    }

                    PXCMFaceData.ExpressionsData expressionData = face.QueryExpressions();
                    if (expressionData != null)
                    {
                        for (int j = 0; j < FInExpressions.SliceCount; j++)
                        {
                            PXCMFaceData.ExpressionsData.FaceExpressionResult expressionResult;
                            if (expressionData.QueryExpression(FInExpressions[j], out expressionResult))
                            {
                                // Append this expression's intensity to the output spread
                                FOutFaceExpressionsResult.SliceCount++;
                                FOutFaceExpressionsResult[FOutFaceExpressionsResult.SliceCount - 1] = expressionResult.intensity;
                            }
                        }
                    }
                    else
                    {
                        FOutFaceExpressionsResult.SliceCount = 0;
                    }

                    PXCMFaceData.PulseData pulseData = face.QueryPulse();
                    if (pulseData != null)
                    {
                        FOutPulse.SliceCount = sliceCount;
                        FOutPulse[i]         = pulseData.QueryHeartRate();
                    }
                }
            }
        }
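
Exemplo n.º 28 targets vvvv, so per-face landmark points are flattened into a single output spread with a parallel bin-size spread. A plain-C# sketch of that flattening pattern (List<T> standing in for ISpread<T>; perFacePoints is a hypothetical input):

        // Illustrative sketch of the bin-size/flat-list pattern used above,
        // with vvvv spreads replaced by plain lists.
        var binSizes   = new List<int>();
        var flatPoints = new List<PXCMPointF32>();
        foreach (PXCMFaceData.LandmarkPoint[] facePoints in perFacePoints) // hypothetical
        {
            binSizes.Add(facePoints.Length);        // number of points for this face
            foreach (var point in facePoints)
            {
                flatPoints.Add(point.image);        // flattened 2D image coordinates
            }
        }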
Exemplo n.º 29
0
        private void ProcessingThread()
        {
            try
            {
                while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    PXCMCapture.Sample sample = senseManager.QuerySample();

                    PXCMImage.ImageData colorData;

                    sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);

                    Bitmap colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                    // Get face data
                    if (faceData != null)
                    {
                        faceData.Update();
                        numFacesDetected = faceData.QueryNumberOfDetectedFaces();

                        if (numFacesDetected > 0)
                        {
                            // Get the last detected face (highest index)
                            PXCMFaceData.Face face = faceData.QueryFaceByIndex(numFacesDetected - 1);

                            // Retrieve face location data
                            PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                            if (faceDetectionData != null)
                            {
                                PXCMRectI32 faceRectangle;
                                faceDetectionData.QueryBoundingRect(out faceRectangle);
                                faceRectangleHeight = faceRectangle.h;
                                faceRectangleWidth  = faceRectangle.w;
                                faceRectangleX      = faceRectangle.x;
                                faceRectangleY      = faceRectangle.y;
                            }

                            // Process face recognition data; face is already known to be
                            // non-null here, so guard the recognition module instead
                            recognitionData = face.QueryRecognition();
                            if (recognitionData != null)
                            {
                                // Set the user ID and process register/unregister logic
                                if (recognitionData.IsRegistered())
                                {
                                    userId = Convert.ToString(recognitionData.QueryUserID());

                                    // Notify listeners only when the recognized user changes
                                    if (flagUserId != userId)
                                    {
                                        Actions.LoadUser(Convert.ToInt16(userId), 255, "userinview", true);
                                        flagUserId = userId;
                                    }

                                    if (doUnregister)
                                    {
                                        recognitionData.UnregisterUser();
                                        SaveDatabaseToFile();
                                        doUnregister = false;
                                    }
                                }
                                else
                                {
                                    if (doRegister)
                                    {
                                        recognitionData.RegisterUser();

                                        // Capture a jpg image of registered user
                                        colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

                                        doRegister = false;
                                    }
                                    else
                                    {
                                        userId = "Unrecognized";
                                    }
                                }
                            }
                        }
                        else
                        {
                            userId     = "No users in view";
                            flagUserId = "modifyed";
                        }
                    }

                    // Release resources
                    colorBitmap.Dispose();
                    sample.color.ReleaseAccess(colorData);
                    sample.color.Dispose();

                    senseManager.ReleaseFrame();

                    coords = string.Format("{0} {1} {2} {3}", faceRectangleX, faceRectangleY, faceRectangleWidth, faceRectangleHeight);
                    Server.sendMsg(255, "rect", coords, userId);
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine("ERROR in ProcessingThread: " + ex.Message);
            }
        }
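
SaveDatabaseToFile, called after UnregisterUser above, is not included in this excerpt. A plausible sketch based on the SDK's recognition-database API (an assumption, not the original implementation; requires using System.IO):

        // Hedged sketch: persist the recognition database to disk. The file
        // name and method body are assumptions, not the original code.
        private void SaveDatabaseToFile()
        {
            PXCMFaceData.RecognitionModuleData moduleData = faceData.QueryRecognitionModule();
            Int32 nBytes = moduleData.QueryDatabaseSize();
            Byte[] buffer = new Byte[nBytes];
            moduleData.QueryDatabaseBuffer(buffer);
            File.WriteAllBytes("database.bin", buffer);  // hypothetical path
        }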
Exemplo n.º 30
0
    /// <summary>
    /// Update is called every frame, if the MonoBehaviour is enabled.
    /// </summary>
    void Update()
    {
        /* Make sure PXCMSenseManager Instance is Initialized */
        if (psm == null)
        {
            return;
        }

        /* Wait until frame data is available: true = aligned, false = unaligned */
        if (psm.AcquireFrame(true) != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            return;
        }

        /* Retrieve face tracking data if ready */
        faceAnalyzer = psm.QueryFace();
        if (faceAnalyzer != null)
        {
            PXCMFaceData _outputData = faceAnalyzer.CreateOutput();
            if (_outputData != null)
            {
                _outputData.Update();

                for (int i = 0; i < _outputData.QueryNumberOfDetectedFaces(); i++)
                {
                    PXCMFaceData.Face _iFace = _outputData.QueryFaceByIndex(i);
                    if (_iFace != null)
                    {
                        /* Retrieve 78 Landmark Points */
                        PXCMFaceData.LandmarksData LandmarkData = _iFace.QueryLandmarks();
                        if (LandmarkData != null)
                        {
                            PXCMFaceData.LandmarkPoint[] landmarkPoints = new PXCMFaceData.LandmarkPoint[MaxPoints];
                            if (LandmarkData.QueryPoints(out landmarkPoints))
                            {
                                faceRenderer.DisplayJoints2D(landmarkPoints);
                            }
                        }

                        /* Retrieve Detection Data */
                        PXCMFaceData.DetectionData detectionData = _iFace.QueryDetection();
                        if (detectionData != null)
                        {
                            PXCMRectI32 rect;
                            if (detectionData.QueryBoundingRect(out rect))
                            {
                                faceRenderer.SetDetectionRect(rect);
                            }
                        }

                        /* Retrieve Pose Data */
                        PXCMFaceData.PoseData poseData = _iFace.QueryPose();
                        if (poseData != null)
                        {
                            PXCMFaceData.PoseQuaternion poseQuaternion;
                            if (poseData.QueryPoseQuaternion(out poseQuaternion))
                            {
                                faceRenderer.DisplayPoseQuaternion(poseQuaternion);
                            }
                        }

                        /* Retrieve Expression Data */
                        PXCMFaceData.ExpressionsData expressionData = _iFace.QueryExpressions();
                        if (expressionData != null)
                        {
                            PXCMFaceData.ExpressionsData.FaceExpressionResult expressionResult;
                            if (expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out expressionResult))
                            {
                                faceRenderer.DisplayExpression(expressionResult, PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN);
                            }
                        }
                    }
                }

                /* Retrieve Alert Data */
                PXCMFaceData.AlertData _alertData;
                for (int i = 0; i < _outputData.QueryFiredAlertsNumber(); i++)
                {
                    if (_outputData.QueryFiredAlertData(i, out _alertData) == pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        faceRenderer.DisplayAlerts(_alertData);
                    }
                }
                _outputData.Dispose();
            }
        }

        /* Retrieve a sample from the camera */
        PXCMCapture.Sample sample = psm.QueryFaceSample();
        if (sample != null)
        {
            colorImage = sample.color;
            if (colorImage != null)
            {
                if (colorTexture2D == null)
                {
                    /* If not allocated, allocate a Texture2D */
                    colorTexture2D = new Texture2D(colorImage.info.width, colorImage.info.height, TextureFormat.ARGB32, false);

                    /* Associate the Texture2D with a gameObject */
                    colorPlane.renderer.material.mainTexture = colorTexture2D;
                    //colorPlane.renderer.material.mainTextureScale = new Vector2(-1f, 1f);
                }

                /* Retrieve the image data in Texture2D */
                PXCMImage.ImageData colorImageData;
                colorImage.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out colorImageData);
                colorImageData.ToTexture2D(0, colorTexture2D);
                colorImage.ReleaseAccess(colorImageData);

                /* Apply the texture to the GameObject to display on */
                colorTexture2D.Apply();
            }
        }

        /* Release the frame so the next frame can be processed */
        psm.ReleaseFrame();
    }
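
This Update loop never disposes the SenseManager; in a MonoBehaviour the usual place for teardown is OnDisable or OnDestroy. A minimal cleanup sketch under that assumption (the original source may handle this elsewhere):

    // Hedged cleanup sketch, not part of the original example.
    void OnDisable()
    {
        if (psm != null)
        {
            psm.Dispose();
            psm = null;
        }
    }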