Ejemplo n.º 1
0
        /// <summary>
        /// Wires up the Kinect sensor pipeline — audio, color, depth and skeleton
        /// streams, gesture/posture detectors and voice commands — then starts the
        /// sensor. Does nothing when no sensor is connected.
        /// </summary>
        private void Initialize()
        {
            if (kinectSensor == null)
            {
                return;
            }

            // Audio stream manager backs the beam-angle UI binding.
            audioManager = new AudioStreamManager(kinectSensor.AudioSource);
            audioBeamAngle.DataContext = audioManager;

            kinectSensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
            kinectSensor.ColorFrameReady += kinectRuntime_ColorFrameReady;

            kinectSensor.DepthStream.Enable(DepthImageFormat.Resolution320x240Fps30);
            kinectSensor.DepthFrameReady += kinectSensor_DepthFrameReady;

            // Smoothing parameters trade a little latency for less skeleton jitter.
            kinectSensor.SkeletonStream.Enable(new TransformSmoothParameters
            {
                Smoothing          = 0.5f,
                Correction         = 0.5f,
                Prediction         = 0.5f,
                JitterRadius       = 0.05f,
                MaxDeviationRadius = 0.04f
            });
            kinectSensor.SkeletonFrameReady += kinectRuntime_SkeletonFrameReady;

            swipeGestureRecognizer = new SwipeGestureDetector();
            swipeGestureRecognizer.OnGestureDetected += OnGestureDetected;

            tPostureDetector = new AlgorithmicPostureDetector();
            tPostureDetector.PostureDetected += templatePostureDetector_PostureDetected;

            skeletonDisplayManager = new SkeletonDisplayManager(kinectSensor, kinectCanvas);

            kinectSensor.Start();

            // These also initialize circleGestureRecognizer, which is added to the
            // combined detector below.
            LoadCircleGestureDetector();
            LoadLetterTPostureDetector();

            //nuiCamera = new BindableNUICamera(kinectSensor);

            // NOTE(review): nuiCamera is never assigned here (its creation is commented
            // out above), so this binding may be null — confirm it is set elsewhere.
            elevationSlider.DataContext = nuiCamera;

            voiceCommander = new VoiceCommander("record", "stop");
            voiceCommander.OrderDetected += voiceCommander_OrderDetected;

            StartVoiceCommander();

            kinectDisplay.DataContext = colorManager;

            // Run the circle and swipe detectors in parallel on the same skeleton data.
            parallelCombinedGestureDetector = new ParallelCombinedGestureDetector();
            parallelCombinedGestureDetector.OnGestureDetected += OnGestureDetected;
            parallelCombinedGestureDetector.Add(circleGestureRecognizer);
            parallelCombinedGestureDetector.Add(swipeGestureRecognizer);
        }
Ejemplo n.º 2
0
    /// <summary>
    /// Builds a gesture detector for training: one posture detector per recorded
    /// tag, each with one joint matcher per tracked joint.
    /// </summary>
    /// <param name="name">Base name for the gesture and its postures.</param>
    /// <param name="isUseRel">When true, postures after the first use relative
    /// matchers anchored to the same joint of the previous posture.</param>
    /// <returns>The freshly configured detector (all threshold multipliers set to 1).</returns>
    private GestureDetector InitTrain(string name, bool isUseRel)
    {
        // The largest recorded tag value determines how many postures to train.
        m_tagCount = Mathf.Max(m_skRecorder.m_tagList.ToArray());

        GestureDetector detector = new GestureDetector();
        detector.m_name             = name;
        detector.m_jointIdx         = (Joint[])m_jointIdx.Clone();
        detector.m_thresMulPerPost  = new float[m_tagCount];
        detector.m_thresMulPerJoint = new float[JointCount];

        PostureDetector[] postures = new PostureDetector[m_tagCount];
        detector.m_posts = postures;

        for (int postNo = 0; postNo < m_tagCount; postNo++)
        {
            detector.m_thresMulPerPost[postNo] = 1;
            postures[postNo]        = new PostureDetector();
            postures[postNo].m_name = name + '_' + (postNo + 1);

            JointMatcher[] matchers = new JointMatcher[JointCount];
            postures[postNo].m_jointMatcher = matchers;

            for (int jointNo = 0; jointNo < JointCount; jointNo++)
            {
                if (isUseRel && postNo > 0)
                {
                    // Relative matcher: anchored to this joint in the previous posture.
                    JointMatcherR relative = new JointMatcherR();
                    matchers[jointNo] = relative;
                    relative.SetLastPostJoint(detector, postNo, jointNo);
                }
                else
                {
                    matchers[jointNo] = new JointMatcher();
                }

                matchers[jointNo].m_jointIdx         = m_jointIdx[jointNo];
                detector.m_thresMulPerJoint[jointNo] = 1;
            }
        }

        return detector;
    }
Ejemplo n.º 3
0
    /// <summary>
    /// One per-frame detection step for a dynamic (multi-posture) gesture.
    /// While a detection is active and a repeat interval is configured, re-fires
    /// periodically as long as the final posture is still held; otherwise feeds
    /// the current frame's posture results into the state machine.
    /// </summary>
    /// <returns>True when the gesture is (re-)detected this frame.</returns>
    private bool DetectDynamic()
    {
        if (m_isDetected && m_frameIntv > 0)
        {
            // Gesture already detected: check whether the end posture is still held.
            PostureDetector endPosDet = m_posts[PostureCount - 1];
            float           lastScore = endPosDet.Score;
            //endPosDet.m_isCheckRise = false;
            // Re-detect without committing state (Detect(false)); also require the
            // new score not to have collapsed below half of the previous score.
            // NOTE(review): divides by lastScore — assumes the prior score is
            // non-zero here; confirm PostureDetector guarantees that after a detection.
            if (endPosDet.Detect(false) && endPosDet.Score / lastScore > .5)
            {
                // Fire on a cadence: the first repeat uses m_frameWait, subsequent
                // repeats (continuous mode) use m_frameIntv.
                m_frameIntvCounter = (m_frameIntvCounter + 1) % (m_isStartCont ? m_frameIntv : m_frameWait);
                if (m_frameIntvCounter == 0)
                {
                    m_isStartCont = true;
                    return(true);
                }
                else
                {
                    return(false);
                }
            }
            else
            {
                // End posture lost: reset the cadence and leave continuous mode.
                m_frameIntvCounter = 0;
                //endPosDet.m_isCheckRise = true;
                m_isStartCont = false;
            }
        }

        // Normal path: evaluate the posture for the current FSM state (committing
        // detector state), then advance the gesture state machine.
        m_isPostDet[CurState] = m_posts[CurState].Detect(true);
        if (CurState == 1)
        {
            // Also refresh the start posture (non-committing) while in state 1 —
            // presumably so the FSM can fall back to the start; verify against m_fsm.
            m_isPostDet[0] = m_posts[0].Detect(false);
        }
        m_isDetected = m_fsm.Update(m_isPostDet);
        return(m_isDetected);
    }
        /// <summary>
        /// Serializes a list of Kinect bodies into a JSON string of skeletons,
        /// including joint positions/orientations, an associated face (matched by
        /// tracking id) and a posture classification per body.
        /// </summary>
        /// <param name="bodies">The Kinect bodies.</param>
        /// <param name="mapper">The coordinate mapper used to project camera-space joints into depth space.</param>
        /// <param name="faceFrameResults">The Kinect faces; may be null or contain null entries.</param>
        /// <returns>A JSON representation of the skeletons.</returns>
        public static string Serialize(this List <Body> bodies, CoordinateMapper mapper, FaceFrameResult[] faceFrameResults)
        {
            JSONBodyCollection jsonBodies = new JSONBodyCollection {
                Bodies = new List <JSONBody>()
            };

            foreach (Body body in bodies)
            {
                JSONBody jsonBody = new JSONBody
                {
                    ID     = body.TrackingId.ToString(),
                    Joints = new List <JSONJoint>()
                };

                foreach (KeyValuePair <JointType, Joint> jointpair in body.Joints)
                {
                    Joint joint = jointpair.Value;

                    DepthSpacePoint depthPoint = mapper.MapCameraPointToDepthSpace(joint.Position);

                    // 'joint' already holds the value for this JointType, so re-indexing
                    // body.Joints is redundant; hoist the orientation lookup for the same reason.
                    var orientation = body.JointOrientations[joint.JointType].Orientation;

                    jsonBody.Joints.Add(new JSONJoint
                    {
                        Name = joint.JointType.ToString().ToLower(),
                        MapX = depthPoint.X,
                        MapY = depthPoint.Y,
                        MapZ = joint.Position.Z,
                        X    = joint.Position.X,
                        Y    = joint.Position.Y,
                        Z    = joint.Position.Z,

                        // absolute orientation
                        Quaternion_W = orientation.W,
                        Quaternion_X = orientation.X,
                        Quaternion_Y = orientation.Y,
                        Quaternion_Z = orientation.Z,

                        IsTracked = (joint.TrackingState == TrackingState.Tracked)
                    });
                }

                // Associate a face with this body by tracking id (null-safe: a missing
                // or null face array simply yields a body without a face).
                FaceFrameResult associatedFace = null;
                if (faceFrameResults != null)
                {
                    foreach (var f in faceFrameResults)
                    {
                        if (f != null && f.TrackingId == body.TrackingId)
                        {
                            associatedFace = f;
                            break;
                        }
                    }
                }
                if (associatedFace != null)
                {
                    var rotation  = associatedFace.FaceRotationQuaternion;
                    var faceProps = associatedFace.FaceProperties;
                    jsonBody.Face = new JSONFace
                    {
                        Quaternion_W = rotation.W,
                        Quaternion_X = rotation.X,
                        Quaternion_Y = rotation.Y,
                        Quaternion_Z = rotation.Z,

                        MouthOpened    = IsPropertyDetected(faceProps[FaceProperty.MouthOpen]),
                        MouthMoved     = IsPropertyDetected(faceProps[FaceProperty.MouthMoved]),
                        LeftEyeClosed  = IsPropertyDetected(faceProps[FaceProperty.LeftEyeClosed]),
                        RightEyeClosed = IsPropertyDetected(faceProps[FaceProperty.RightEyeClosed])
                    };
                }

                // Classify the body's posture (standing / sitting / lying).
                int posture = PostureDetector.Detect(body);
                jsonBody.Posture = posture;

                jsonBodies.Bodies.Add(jsonBody);
            }

            return(Serialize(jsonBodies));
        }

        /// <summary>
        /// Treats Maybe and Yes as a positive detection, matching the original inline checks.
        /// </summary>
        private static bool IsPropertyDetected(DetectionResult result)
        {
            return result == DetectionResult.Maybe || result == DetectionResult.Yes;
        }