Example #1
        /// <summary>
        /// Draws a bone line between two joints
        /// </summary>
        /// <param name="skeleton">skeleton to draw bones from</param>
        /// <param name="drawingContext">drawing context to draw to</param>
        /// <param name="jointType0">joint to start drawing from</param>
        /// <param name="jointType1">joint to end drawing at</param>
        private void DrawBone(DrawingContext dc, Skeleton skeleton, JointType jointType0, JointType jointType1)
        {
            Joint joint0 = skeleton.Joints[jointType0];
            Joint joint1 = skeleton.Joints[jointType1];

            // If we can't find either of these joints, exit
            if (joint0.TrackingState == JointTrackingState.NotTracked ||
                joint1.TrackingState == JointTrackingState.NotTracked)
            {
                return;
            }

            // Don't draw if both points are inferred
            if (joint0.TrackingState == JointTrackingState.Inferred &&
                joint1.TrackingState == JointTrackingState.Inferred)
            {
                return;
            }

            // We assume all drawn bones are inferred unless BOTH joints are tracked
            Pen drawPen = this.InferredBonePen;

            if (joint0.TrackingState == JointTrackingState.Tracked && joint1.TrackingState == JointTrackingState.Tracked)
            {
                drawPen = this.TrackedBonePen;
            }

            dc.DrawLine(
                drawPen,
                RoomSetting.CameraPointToObservePoint(SkeletonPointToCameraPoint(joint0.Position)),
                RoomSetting.CameraPointToObservePoint(SkeletonPointToCameraPoint(joint1.Position)));
        }
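
        // Usage sketch (not in the original example): DrawBone is typically called once per
        // bone pair when rendering a full skeleton, e.g. from a helper like the hypothetical
        // DrawBonesAndJoints below.
        private void DrawBonesAndJoints(DrawingContext dc, Skeleton skeleton)
        {
            // Torso
            this.DrawBone(dc, skeleton, JointType.Head, JointType.ShoulderCenter);
            this.DrawBone(dc, skeleton, JointType.ShoulderCenter, JointType.Spine);

            // Right arm (the pointing arm analyzed elsewhere in these examples)
            this.DrawBone(dc, skeleton, JointType.ShoulderCenter, JointType.ShoulderRight);
            this.DrawBone(dc, skeleton, JointType.ShoulderRight, JointType.ElbowRight);
            this.DrawBone(dc, skeleton, JointType.ElbowRight, JointType.WristRight);
            this.DrawBone(dc, skeleton, JointType.WristRight, JointType.HandRight);
        }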
        /*
         * private const int DMAFilterN = 3;
         * private Filter DMAFilterX = new DMAFilter(DMAFilterN);
         * private Filter DMAFilterY = new DMAFilter(DMAFilterN);
         *
         * private const int MedianFilterN = 4;
         * private Filter MedianFilterX = new MedianFilter(MedianFilterN);
         * private Filter MedianFilterY = new MedianFilter(MedianFilterN);
         */
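
        // Sketch only (not in the original): the Filter, DMAFilter and MedianFilter types
        // referenced by the commented-out fields above are not shown in this example. Assuming
        // a simple sliding-window design (and using System.Collections.Generic / System.Linq),
        // they could look roughly like this:
        //
        // public abstract class Filter
        // {
        //     protected readonly Queue<double> window = new Queue<double>();
        //     private readonly int size;
        //
        //     protected Filter(int size) { this.size = size; }
        //
        //     // Push a new sample, drop the oldest once the window is full, return the filtered value
        //     public double Apply(double sample)
        //     {
        //         this.window.Enqueue(sample);
        //         if (this.window.Count > this.size)
        //         {
        //             this.window.Dequeue();
        //         }
        //
        //         return this.Evaluate();
        //     }
        //
        //     protected abstract double Evaluate();
        // }
        //
        // // Simple moving average over the window
        // public class DMAFilter : Filter
        // {
        //     public DMAFilter(int size) : base(size) { }
        //     protected override double Evaluate() { return this.window.Average(); }
        // }
        //
        // // Median of the window
        // public class MedianFilter : Filter
        // {
        //     public MedianFilter(int size) : base(size) { }
        //     protected override double Evaluate()
        //     {
        //         double[] sorted = this.window.OrderBy(v => v).ToArray();
        //         return sorted[sorted.Length / 2];
        //     }
        // }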
        /// <summary>
        /// Draws the output points on the screen
        /// </summary>
        private void DrawOutput()
        {
            using (DrawingContext dc = this.outputDrawingGroup.Open())
            {
                dc.DrawImage(blankColorBitmap, new Rect(0.0, 0.0, RenderWidth, RenderHeight));
                RoomSetting.PaintPlatesAndCoordinates(dc);

                foreach (Player player in this.players)
                {
                    player.DrawSkeleton(dc);

                    if (player.headAndHandValid)
                    {
                        SpacePoint intersection = RoomSetting.FindTheIntersection(RoomSetting.CameraPointToRoomPoint(player.startPointInCameraCoordinates),
                                                                                  RoomSetting.CameraPointToRoomPoint(player.endPointInCameraCoordinates));

                        if (intersection != null)
                        {
                            Point showPoint = RoomSetting.RoomPointToObservePoint(intersection);
                            dc.DrawLine(new Pen(player.color, 2), RoomSetting.CameraPointToObservePoint(player.startPointInCameraCoordinates),
                                        showPoint);

                            if (showPoint.X >= 0 && showPoint.X < RenderWidth && showPoint.Y >= 0 && showPoint.Y < RenderHeight)
                            {
                                dc.DrawEllipse(player.color, null, showPoint, this.PointThickness, this.PointThickness);
                            }
                            else
                            {
                                RenderClippedEdges(showPoint, dc);
                            }
                        }
                    }
                }
            }
        }
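
        // RenderClippedEdges is called in DrawOutput above but not defined in this example.
        // A minimal sketch, assuming it simply marks whichever screen edge the out-of-bounds
        // point crossed (the edge thickness and the red brush are assumptions, not taken from
        // the original code):
        private const double ClipBoundsThickness = 10;

        private void RenderClippedEdges(Point point, DrawingContext drawingContext)
        {
            if (point.Y >= RenderHeight)
            {
                drawingContext.DrawRectangle(Brushes.Red, null,
                    new Rect(0, RenderHeight - ClipBoundsThickness, RenderWidth, ClipBoundsThickness));
            }

            if (point.Y < 0)
            {
                drawingContext.DrawRectangle(Brushes.Red, null,
                    new Rect(0, 0, RenderWidth, ClipBoundsThickness));
            }

            if (point.X < 0)
            {
                drawingContext.DrawRectangle(Brushes.Red, null,
                    new Rect(0, 0, ClipBoundsThickness, RenderHeight));
            }

            if (point.X >= RenderWidth)
            {
                drawingContext.DrawRectangle(Brushes.Red, null,
                    new Rect(RenderWidth - ClipBoundsThickness, 0, ClipBoundsThickness, RenderHeight));
            }
        }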
Example #3
        /// <summary>
        /// Updates the head and hand point information for this player
        /// </summary>
        public void AnalyzeHeadAndHands()
        {
            if (skeleton.Joints[JointType.HandLeft].TrackingState == JointTrackingState.Tracked)
            {
                //System.Diagnostics.Debug.WriteLine(RoomSetting.CameraPointToRoomPoint(this.painter.SkeletonPointToCameraPoint(skeleton.Joints[JointType.HandLeft].Position)).Z);

                if (RoomSetting.CameraPointToRoomPoint(this.painter.SkeletonPointToCameraPoint(skeleton.Joints[JointType.HandLeft].Position)).Z > 1200)
                {
                    RoomSetting.move = true;
                }
                else
                {
                    RoomSetting.move = false;
                }
            }

            // If we can't find either head or right hand, exit
            if (skeleton.Joints[JointType.Head].TrackingState == JointTrackingState.NotTracked ||
                skeleton.Joints[JointType.HandRight].TrackingState == JointTrackingState.NotTracked)
            {
                this.headAndHandValid = false;
                return;
            }

            // Don't analyze if both points are inferred
            if (skeleton.Joints[JointType.Head].TrackingState == JointTrackingState.Inferred &&
                skeleton.Joints[JointType.HandRight].TrackingState == JointTrackingState.Inferred)
            {
                this.headAndHandValid = false;
                return;
            }

            this.headAndHandValid = true;
            //this.startPointInColorFrame = this.painter.SkeletonPointToScreen(skeleton.Joints[JointType.Head].Position);
            this.startPointInCameraCoordinates = this.painter.SkeletonPointToCameraPoint(skeleton.Joints[JointType.Head].Position);
            //this.endPointInColorFrame = this.painter.SkeletonPointToScreen(skeleton.Joints[JointType.HandRight].Position);
            this.endPointInCameraCoordinates = this.painter.SkeletonPointToCameraPoint(skeleton.Joints[JointType.HandRight].Position);
        }
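
        // Sketch only: the head (start) and right-hand (end) points stored above define a
        // pointing ray, and RoomSetting.FindTheIntersection (used when drawing the output)
        // intersects that ray with a target plate. Its real implementation is not part of this
        // example; a generic ray/plane intersection, assuming SpacePoint is a class exposing
        // double X/Y/Z and that the plane is given by a point and a normal (all assumptions),
        // could look roughly like this:
        public static SpacePoint IntersectRayWithPlane(
            SpacePoint rayStart, SpacePoint rayEnd,
            SpacePoint planePoint, SpacePoint planeNormal)
        {
            // Direction of the pointing ray (head towards hand)
            double dirX = rayEnd.X - rayStart.X;
            double dirY = rayEnd.Y - rayStart.Y;
            double dirZ = rayEnd.Z - rayStart.Z;

            double denom = planeNormal.X * dirX + planeNormal.Y * dirY + planeNormal.Z * dirZ;
            if (Math.Abs(denom) < 1e-6)
            {
                return null; // ray is parallel to the plane, mirroring the null check in DrawOutput
            }

            double t = (planeNormal.X * (planePoint.X - rayStart.X)
                      + planeNormal.Y * (planePoint.Y - rayStart.Y)
                      + planeNormal.Z * (planePoint.Z - rayStart.Z)) / denom;
            if (t < 0)
            {
                return null; // intersection lies behind the head, opposite to the pointing direction
            }

            return new SpacePoint
            {
                X = rayStart.X + t * dirX,
                Y = rayStart.Y + t * dirY,
                Z = rayStart.Z + t * dirZ
            };
        }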
        /// <summary>
        /// Execute startup tasks
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit (See components in Toolkit Browser).
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this.sensor = potentialSensor;
                    break;
                }
            }

            if (this.sensor != null)
            {
                // Turn on the skeleton, color, and depth streams to receive frame data
                TransformSmoothParameters smoothingParam = new TransformSmoothParameters
                {
                    Smoothing          = 0.5f,
                    Correction         = 0.1f,
                    Prediction         = 0.5f,
                    JitterRadius       = 0.1f,
                    MaxDeviationRadius = 0.1f
                };
                this.sensor.SkeletonStream.Enable(smoothingParam);

                //this.sensor.SkeletonStream.Enable();

                this.sensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
                this.checkBoxSeatedMode.SetCurrentValue(CheckBox.IsCheckedProperty, true);

                this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                this.sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);

                this.RenderHeight     = 480;
                this.RenderWidth      = 640;
                this.depthImageFormat = this.sensor.DepthStream.Format;
                this.colorImageFormat = this.sensor.ColorStream.Format;

                // Create the drawing group we'll use for drawing
                this.drawingGroup = new DrawingGroup();
                this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, RenderWidth, RenderHeight));

                this.outputDrawingGroup = new DrawingGroup();
                this.outputDrawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, RenderWidth, RenderHeight));

                // Display the drawing using our image control
                Image.Source = new DrawingImage(this.drawingGroup);
                // Allocate space to put the pixels we'll receive
                this.colorImage = new byte[this.sensor.ColorStream.FramePixelDataLength];
                // This is the bitmap we'll display on-screen
                this.colorBitmap      = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);
                this.blankColorBitmap = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

                OutputImage.Source = new DrawingImage(this.outputDrawingGroup);

                RoomSetting.SetCameraMatrix();

                RoomSetting.SetPlates();

                // Add an event handler to be called whenever there is new all frame data
                this.sensor.AllFramesReady += this.OnAllFramesReady;

                // Start the sensor!
                try
                {
                    this.sensor.Start();
                }
                catch (IOException)
                {
                    this.sensor = null;
                }
            }

            if (null == this.sensor)
            {
                this.statusBarText.Text = Properties.Resources.NoKinectReady;
            }
        }
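
        // The OnAllFramesReady handler registered in WindowLoaded is not included in this
        // example. A minimal sketch, assuming the usual Kinect SDK 1.x pattern of copying
        // skeleton and color data out of the event arguments (the commented per-player update
        // at the end is illustrative only, based on the members used in the other examples):
        private Skeleton[] skeletons = new Skeleton[0];

        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    this.skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    skeletonFrame.CopySkeletonDataTo(this.skeletons);
                }
            }

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    // Copy the raw 32-bit color pixels into the buffer allocated in WindowLoaded
                    colorFrame.CopyPixelDataTo(this.colorImage);
                }
            }

            // Hypothetical follow-up, not from the original code:
            // foreach (Player player in this.players) { player.AnalyzeHeadAndHands(); }
            // this.DrawOutput();
        }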