public DTWImplementation(KinectSensor s, SkeletonTracker st)
        {
            _dtw = new DtwGestureRecognizer(12, 0.6, 2, 2, 10);
            _video = new ArrayList();
            _skeleton = st;
            kinect = s;
            System.IO.FileInfo fi = new System.IO.FileInfo(GestureSaveFileLocation);
            LoadGesturesFromFile(fi.FullName);

            s.SkeletonFrameReady += new EventHandler<SkeletonFrameReadyEventArgs>(DTWSkeletonReady);
            Skeleton2DDataExtract.Skeleton2DdataCoordReady += NuiSkeleton2DdataCoordReady;
        }
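The constructor above depends on LoadGesturesFromFile, which is not shown in this listing. Below is a minimal sketch following the line-oriented format used by the public KinectDTW sample this code appears to derive from: an "@"-prefixed gesture name, "~" terminating each frame of 12 doubles, and "----" terminating a gesture. The delimiters and the _dtw.AddOrUpdate call are assumptions; adjust them if your save routine writes a different format.

        // Sketch of a gesture loader (assumed KinectDTW text format, see note above)
        private void LoadGesturesFromFile(string fileLocation)
        {
            int itemCount = 0;
            string line;
            string gestureName = string.Empty;
            ArrayList frames = new ArrayList();
            double[] items = new double[12];

            using (var file = new System.IO.StreamReader(fileLocation))
            {
                while ((line = file.ReadLine()) != null)
                {
                    if (line.StartsWith("@"))
                    {
                        gestureName = line;            // start of a new gesture
                    }
                    else if (line.StartsWith("~"))
                    {
                        frames.Add(items);             // one complete 12-value frame
                        itemCount = 0;
                        items = new double[12];
                    }
                    else if (line.StartsWith("----"))
                    {
                        // End of gesture: hand the recorded sequence to the recognizer
                        _dtw.AddOrUpdate(frames, gestureName);
                        frames = new ArrayList();
                        gestureName = string.Empty;
                        itemCount = 0;
                    }
                    else
                    {
                        items[itemCount++] = double.Parse(line);   // one coordinate value
                    }
                }
            }
        }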
Example #2
        /// <summary>
        /// Runs after the window is loaded
        /// </summary>
        /// <param name="sender">The sender object</param>
        /// <param name="e">Routed Event Args</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            _nui = (from i in KinectSensor.KinectSensors
                    where i.Status == KinectStatus.Connected
                    select i).FirstOrDefault();

            if (_nui == null)
            {
                System.Windows.MessageBox.Show("No Kinect connected!");
                // Shut down through WPF (not WinForms) and return so the null sensor is never dereferenced below
                System.Windows.Application.Current.Shutdown();
                return;
                //  throw new NotSupportedException("No Kinect connected!");
            }
            try
            {
                _nui.DepthStream.Enable(DepthImageFormat.Resolution320x240Fps30);
                _nui.SkeletonStream.Enable();
                _nui.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                _nui.Start();
            }
            catch (InvalidOperationException)
            {
                System.Windows.MessageBox.Show("Runtime initialization failed. Please make sure Kinect device is plugged in.");
                return;
            }

            _lastTime = DateTime.Now;

            _dtw   = new DtwGestureRecognizer(12, 0.6, 2, 2, 10);
            _video = new ArrayList();

            // If you want to see the depth image and frames per second then include this
            // I'mma turn this off 'cos my 'puter is proper slow
            //_nui.DepthFrameReady += NuiDepthFrameReady;

            _nui.SkeletonFrameReady += NuiSkeletonFrameReady;
            _nui.SkeletonFrameReady += SkeletonExtractSkeletonFrameReady;

            // If you want to see the RGB stream then include this
            //_nui.ColorFrameReady += NuiColorFrameReady;

            Skeleton2DDataExtract.Skeleton2DdataCoordReady += NuiSkeleton2DdataCoordReady;

            // Update the debug window with Sequences information
            dtwTextOutput.Text = _dtw.RetrieveText();

            Debug.WriteLine("Finished Window Loading");
        }
 private void WindowLoaded(object sender, RoutedEventArgs e)
 {
     //kinect discovery and initialization
     DiscoverKinectSensor();
     _dtw = new DtwGestureRecognizer(12, 0.6, 2, 2, 10);
     _video = new ArrayList();
 }
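DiscoverKinectSensor is not shown in this snippet. A plausible sketch, mirroring the LINQ discovery and event wiring used by the other WindowLoaded variants in this listing (the _nui field name and the choice to enable only the skeleton stream are assumptions):

        private void DiscoverKinectSensor()
        {
            // Pick the first connected sensor, as the other variants in this listing do
            _nui = (from i in KinectSensor.KinectSensors
                    where i.Status == KinectStatus.Connected
                    select i).FirstOrDefault();

            if (_nui == null)
            {
                System.Windows.MessageBox.Show("No Kinect connected!");
                return;
            }

            // Skeleton data is all the DTW recognizer needs
            _nui.SkeletonStream.Enable();
            _nui.SkeletonFrameReady += SkeletonExtractSkeletonFrameReady;
            Skeleton2DDataExtract.Skeleton2DdataCoordReady += NuiSkeleton2DdataCoordReady;
            _nui.Start();
        }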
Example #4
        /// <summary>
        /// Runs after the window is loaded
        /// </summary>
        /// <param name="sender">The sender object</param>
        /// <param name="e">Routed Event Args</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            //_nui = new Runtime();
            sensor = (from sensorToCheck in KinectSensor.KinectSensors
                      where sensorToCheck.Status == KinectStatus.Connected
                      select sensorToCheck).FirstOrDefault();

            // FirstOrDefault returns null when no Kinect is connected; guard before using the sensor
            if (sensor == null)
            {
                System.Windows.MessageBox.Show("No Kinect connected!");
                return;
            }

            try
            {
                sensor.Start();
            }
            catch (Exception)
            {
                System.Windows.MessageBox.Show("Failed to start the Kinect sensor.");
                return;
            }
            try
            {
                sensor.ColorStream.Enable();
                sensor.DepthStream.Enable();
                sensor.SkeletonStream.Enable();
            }
            catch (InvalidOperationException)
            {
                System.Windows.MessageBox.Show(
                    "Failed to open stream. Please make sure to specify a supported image type and resolution.");
                return;
            }

            _lastTime = DateTime.Now;

            _dtw = new DtwGestureRecognizer(12, 0.6, 2, 2, 10);
            _video = new ArrayList();

            // If you want to see the depth image and frames per second then include this
            // I'mma turn this off 'cos my 'puter is proper slow
            //sensor.DepthFrameReady += NuiDepthFrameReady;

            sensor.SkeletonFrameReady += NuiSkeletonFrameReady;
            sensor.SkeletonFrameReady += SkeletonExtractSkeletonFrameReady;

            // If you want to see the RGB stream then include this
            //sensor.ColorFrameReady += NuiColorFrameReady;

            Skeleton2DDataExtract.Skeleton2DdataCoordReady += NuiSkeleton2DdataCoordReady;

            // Update the debug window with Sequences information
            dtwTextOutput.Text = _dtw.RetrieveText();

            Debug.WriteLine("Finished Window Loading");
        }
        /// <summary>
        /// Runs after the window is loaded
        /// </summary>
        /// <param name="sender">The sender object</param>
        /// <param name="e">Routed Event Args</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            _nui = (from i in KinectSensor.KinectSensors
                    where i.Status == KinectStatus.Connected
                    select i).FirstOrDefault();

            if (_nui == null)
                throw new NotSupportedException("No Kinect connected!");

            try
            {
                _nui.DepthStream.Enable(DepthImageFormat.Resolution320x240Fps30);
                _nui.SkeletonStream.Enable();
                _nui.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                _nui.Start();
            }
            catch (InvalidOperationException)
            {
                System.Windows.MessageBox.Show("Runtime initialization failed. Please make sure Kinect device is plugged in.");
                return;
            }

            _lastTime = DateTime.Now;

            _dtw = new DtwGestureRecognizer(12, 0.6, 2, 2, 10);
            _video = new ArrayList();

            // If you want to see the depth image and frames per second then include this
            // I'mma turn this off 'cos my 'puter is proper slow
            _nui.DepthFrameReady += NuiDepthFrameReady;

            _nui.SkeletonFrameReady += NuiSkeletonFrameReady;
            _nui.SkeletonFrameReady += SkeletonExtractSkeletonFrameReady;

            // If you want to see the RGB stream then include this
            _nui.ColorFrameReady += NuiColorFrameReady;

            Skeleton2DDataExtract.Skeleton2DdataCoordReady += NuiSkeleton2DdataCoordReady;

            // Update the debug window with Sequences information
            //dtwTextOutput.Text = _dtw.RetrieveText();

            Debug.WriteLine("Finished Window Loading");
        }
        private void InitRuntime()
        {
            //Some runtimes' statuses may be NotPowered or another error state; only initialize the runtime if it is connected.
            if (_Kinect.Status == KinectStatus.Connected)
            {
                bool skeletalViewerAvailable = IsSkeletalViewerAvailable;

                // NOTE:  Skeletal tracking only works on one Kinect per process right now.
                RuntimeOptions = skeletalViewerAvailable ?
                                     RuntimeOptions.UseDepthAndPlayerIndex | RuntimeOptions.UseSkeletalTracking | RuntimeOptions.UseColor
                                     : RuntimeOptions.UseDepth | RuntimeOptions.UseColor;
                _Kinect.Initialize(RuntimeOptions);
                skeletonPanel.Visibility = skeletalViewerAvailable ? System.Windows.Visibility.Visible : System.Windows.Visibility.Collapsed;
                if (RuntimeOptions.HasFlag(RuntimeOptions.UseSkeletalTracking))
                {
                    _Kinect.SkeletonEngine.TransformSmooth = true;
                }
            }
            _lastTime = DateTime.Now;

            _dtw = new DtwGestureRecognizer(12, 0.6, 2, 2, 10);
            _video = new ArrayList();
            LoadGesturesFromFile(GestureFileRecord);

            Skeleton2DDataExtract.Skeleton2DdataCoordReady += NuiSkeleton2DdataCoordReady;
        }
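IsSkeletalViewerAvailable is not shown either. In the beta-SDK samples this pattern comes from, it reports whether any runtime in the process has already claimed the skeleton engine (skeletal tracking works on only one Kinect per process). A sketch under that assumption, using the beta Microsoft.Research.Kinect API:

        // Sketch: skeletal tracking is free only if no runtime in this process has claimed it (assumption)
        private bool IsSkeletalViewerAvailable
        {
            get { return Runtime.Kinects.All(k => k.SkeletonEngine == null); }
        }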
Example #7
        /// <summary>
        /// Runs after the window is loaded
        /// </summary>
        /// <param name="sender">The sender object</param>
        /// <param name="e">Routed Event Args</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            _nui = new Runtime();

            try
            {
                _nui.Initialize(RuntimeOptions.UseDepthAndPlayerIndex | RuntimeOptions.UseSkeletalTracking |
                               RuntimeOptions.UseColor);
            }
            catch (InvalidOperationException)
            {
                System.Windows.MessageBox.Show("Runtime initialization failed. Please make sure Kinect device is plugged in.");
                return;
            }

            try
            {
                _nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
                _nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution320x240, ImageType.DepthAndPlayerIndex);
            }
            catch (InvalidOperationException)
            {
                System.Windows.MessageBox.Show(
                    "Failed to open stream. Please make sure to specify a supported image type and resolution.");
                return;
            }

            _lastTime = DateTime.Now;

            _dtw = new DtwGestureRecognizer(12, 0.6, 2, 2, 10);
            _video = new ArrayList();

            // If you want to see the depth image and frames per second then include this
            // I'mma turn this off 'cos my 'puter is proper slow
            _nui.DepthFrameReady += NuiDepthFrameReady;

            _nui.SkeletonFrameReady += NuiSkeletonFrameReady;
            _nui.SkeletonFrameReady += SkeletonExtractSkeletonFrameReady;

            // If you want to see the RGB stream then include this
            _nui.VideoFrameReady += NuiColorFrameReady;

            Skeleton2DDataExtract.Skeleton2DdataCoordReady += NuiSkeleton2DdataCoordReady;

            // Update the debug window with Sequences information
            dtwTextOutput.Text = _dtw.RetrieveText();

            Debug.WriteLine("Finished Window Loading");
        }
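Every variant in this listing wires Skeleton2DdataCoordReady to NuiSkeleton2DdataCoordReady, which is where the DTW matching actually happens. A trimmed sketch of that handler, following the public KinectDTW sample; the MinimumFrames, BufferSize, and Ignore constants, the _capturing and _flipFlop fields, the results text block, and the Skeleton2DdataCoordEventArgs members are all assumptions carried over from that sample:

        private void NuiSkeleton2DdataCoordReady(object sender, Skeleton2DdataCoordEventArgs a)
        {
            // Once enough frames are buffered and we are not recording, try to match a gesture
            if (_video.Count > MinimumFrames && !_capturing)
            {
                string s = _dtw.Recognize(_video);
                results.Text = "Recognised as: " + s;
                if (!s.Contains("__UNKNOWN"))
                {
                    _video = new ArrayList();   // matched: reset the observation buffer
                }
            }

            // Remember only the last BufferSize frames
            if (_video.Count > BufferSize)
            {
                _video.RemoveAt(0);
            }

            // Buffer only frames with valid coordinates, and only 1 in every Ignore frames
            if (!double.IsNaN(a.GetPoint(0).X))
            {
                _flipFlop = (_flipFlop + 1) % Ignore;
                if (_flipFlop == 0)
                {
                    _video.Add(a.GetCoords());
                }
            }
        }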
        void MainWindow_Loaded(object sender, RoutedEventArgs e)
        {
            InitializeButtons();

            // Setup osc sender
            oscArgs[0] = "127.0.0.1";
            oscArgs[1] = "3333";
            oscWriter = new UdpWriter(oscArgs[0], Convert.ToInt32(oscArgs[1]));

            //Since only a color video stream is needed, RuntimeOptions.UseColor is used.
            _runtime.Initialize(RuntimeOptions.UseDepthAndPlayerIndex | Microsoft.Research.Kinect.Nui.RuntimeOptions.UseColor | RuntimeOptions.UseSkeletalTracking);
            _runtime.SkeletonEngine.TransformSmooth = true;

            //Use to transform and reduce jitter
            _runtime.SkeletonEngine.SmoothParameters = new TransformSmoothParameters
            {
                Smoothing = 0.5f,
                Correction = 0.3f,
                Prediction = 0.4f,
                JitterRadius = 0.05f,
                MaxDeviationRadius = 0.04f
            };

            try
            {
                _runtime.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
                _runtime.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution320x240, ImageType.DepthAndPlayerIndex);
            }
            catch (InvalidOperationException)
            {
                System.Windows.MessageBox.Show(
                    "Failed to open stream. Please make sure to specify a supported image type and resolution.");
                return;
            }
            _lastTime = DateTime.Now;

            _dtw = new DtwGestureRecognizer(18, 0.6, 2, 2, 10);
            _video = new ArrayList();

            //// If you want to see the depth image and frames per second then include this
            //// I'mma turn this off 'cos my 'puter is proper slow
            _runtime.DepthFrameReady += NuiDepthFrameReady;

            _runtime.SkeletonFrameReady += NuiSkeletonFrameReady;
            _runtime.SkeletonFrameReady += SkeletonExtractSkeletonFrameReady;

            //// If you want to see the RGB stream then include this
            //_runtime.VideoFrameReady += NuiColorFrameReady;

            Skeleton3DDataExtract.Skeleton3DdataCoordReady += NuiSkeleton3DdataCoordReady;

            speechRecognizer = SpeechRecognizer.Create();         //returns null if problem with speech prereqs or instantiation.
            if (speechRecognizer != null)
            {
                speechRecognizer.Start(new KinectAudioSource());  //KinectSDK TODO: expose Runtime.AudioSource to return correct audiosource.
                speechRecognizer.SaidSomething += new EventHandler<SpeechRecognizer.SaidSomethingEventArgs>(recognizer_SaidSomething);
            }
            else
            {
                dtwTextOutput.Text = "No Speech";
                speechRecognizer = null;
            }
        }
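The UdpWriter built at the top of this handler is never used in the snippet itself. In projects built on the Ventuz.OSC library it typically forwards joint coordinates from the skeleton handler; a hypothetical helper (the address pattern, argument list, and OscElement usage are assumptions):

        // Hypothetical: forward one joint over the oscWriter created above.
        // Assumes the Ventuz.OSC types (OscElement, UdpWriter.Send) common in Kinect-to-OSC bridges.
        private void SendJointOsc(string joint, float x, float y, float z)
        {
            oscWriter.Send(new OscElement("/joint/" + joint, x, y, z));
        }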
        /// <summary>
        /// Runs after the window is loaded
        /// </summary>
        /// <param name="sender">The sender object</param>
        /// <param name="e">Routed Event Args</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            _nui = (from i in KinectSensor.KinectSensors
                    where i.Status == KinectStatus.Connected
                    select i).FirstOrDefault();

            if (_nui == null)
                throw new NotSupportedException("No Kinect connected!");

            try
            {
                //Smoothing "good for gesture recognition"
                //http://msdn.microsoft.com/en-us/library/jj131024.aspx
                TransformSmoothParameters smoothingParam = new TransformSmoothParameters
                {
                    Smoothing = 0.5f,
                    Correction = 0.5f,
                    Prediction = 0.5f,
                    JitterRadius = 0.05f,
                    MaxDeviationRadius = 0.04f
                };
                //_nui.DepthStream.Enable(DepthImageFormat.Resolution320x240Fps30);
                _nui.SkeletonStream.Enable(smoothingParam);
                //_nui.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                _nui.Start();
            }
            catch (InvalidOperationException)
            {
                System.Windows.MessageBox.Show("Runtime initialization failed. Please make sure Kinect device is plugged in.");
                return;
            }

            _lastTime = DateTime.Now;

            _dtw = new DtwGestureRecognizer(12, 0.6, 2, 2, 10);
            _video = new ArrayList();

            // If you want to see the depth image and frames per second then include this
            // I'mma turn this off 'cos my 'puter is proper slow
            _nui.DepthFrameReady += NuiDepthFrameReady;

            _nui.SkeletonFrameReady += NuiSkeletonFrameReady; //Canvas update
            _nui.SkeletonFrameReady += SkeletonExtractSkeletonFrameReady; //Data processing

            // If you want to see the RGB stream then include this
            _nui.ColorFrameReady += NuiColorFrameReady;

            Skeleton2DDataExtract.Skeleton2DdataCoordReady += NuiSkeleton2DdataCoordReady;

            // Update the debug window with Sequences information
            //dtwTextOutput.Text = _dtw.RetrieveText();

            Debug.WriteLine("Finished Window Loading");
        }
Example #10
        private void KinectSensorChooser_KinectSensorChanged(object sender, DependencyPropertyChangedEventArgs e)
        {
            KinectSensor oldSensor = (KinectSensor)e.OldValue;
            StopKinect(oldSensor);

            KinectSensor newSensor = (KinectSensor)e.NewValue;
            if (newSensor == null)
            {
                return;
            }

            cursor.Visibility = Visibility.Visible;
            sensorAudio = newSensor.AudioSource; // [WL] For audio recording.

            var parameters = new TransformSmoothParameters
            {
                Smoothing = 0.3f,
                Correction = 0.2f,
                Prediction = 0.1f,
                JitterRadius = 0.5f,
                MaxDeviationRadius = 0.2f
            };

            newSensor.ColorStream.Enable();
            newSensor.DepthStream.Enable();
            newSensor.SkeletonStream.Enable(parameters);
            newSensor.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(newSensor_AllFramesReady);

            _dtw = new DtwGestureRecognizer(12, 0.6, 2, 2, 10);
            _video = new ArrayList();
            newSensor.SkeletonFrameReady += SkeletonExtractSkeletonFrameReady;
            //Skeleton2DDataExtract.Skeleton2DdataCoordReady += NuiSkeleton2DdataCoordReady;            
            LoadGesturesFromFile("C:\\Users\\whitlai\\Desktop\\CS160\\CS160_FinalProj_Framework\\CS160_FinalProj_Framework\\RecordedGesturesPrototype.txt");  

            try
            {
                newSensor.Start();
            }
            catch (System.IO.IOException)
            {
                KinectSensorChooser.AppConflictOccurred();
            }
        }
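StopKinect is not shown. A sketch following the usual Coding4Fun KinectSensorChooser pattern, stopping the audio source alongside the sensor since this handler records audio; treat it as an assumption, not the project's actual helper:

        private void StopKinect(KinectSensor sensor)
        {
            if (sensor != null && sensor.IsRunning)
            {
                // Stop streaming before the sensor goes away; stop audio too since AudioSource is used above
                sensor.Stop();
                if (sensor.AudioSource != null)
                {
                    sensor.AudioSource.Stop();
                }
            }
        }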
Example #11
        /// <summary>
        /// Called each time a skeleton frame is ready. Passes skeletal data to the DTW processor
        /// </summary>
        /// <param name="sender">The sender object</param>
        /// <param name="e">Skeleton Frame Ready Event Args</param>
        //private static void SkeletonExtractSkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
        //{
        //    SkeletonFrame skeletonFrame = e.OpenSkeletonFrame();
        //    if (skeletonFrame != null)
        //    {
        //        Skeleton[] skeleton_array = new Skeleton[skeletonFrame.SkeletonArrayLength];
        //        skeletonFrame.CopySkeletonDataTo(skeleton_array);
        //        foreach (Skeleton data in skeleton_array)
        //        {
        //            if (data != null)
        //            {
        //                Skeleton2DDataExtract.ProcessData(data);
        //            }
        //        }
        //    }
        //}

        /// <summary>
        /// Called when each depth frame is ready
        /// </summary>
        /// <param name="sender">The sender object</param>
        /// <param name="e">Depth Image Frame Ready Event Args</param>
        //private void NuiDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
        //{
        //    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
        //    {
        //        if (depthFrame != null)
        //        {
        //            // Copy the pixel data from the image to a temporary array
        //            depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);

        //            // Get the min and max reliable depth for the current frame
        //            int minDepth = depthFrame.MinDepth;
        //            int maxDepth = depthFrame.MaxDepth;

        //            // Convert the depth to RGB
        //            int colorPixelIndex = 0;
        //            for (int i = 0; i < this.depthPixels.Length; ++i)
        //            {
        //                // Get the depth for this pixel
        //                short depth = depthPixels[i].Depth;

        //                // To convert to a byte, we're discarding the most-significant
        //                // rather than least-significant bits.
        //                // We're preserving detail, although the intensity will "wrap."
        //                // Values outside the reliable depth range are mapped to 0 (black).
        //                // Note: Using conditionals in this loop could degrade performance.
        //                // Consider using a lookup table instead when writing production code.
        //                // See the KinectDepthViewer class used by the KinectExplorer sample
        //                // for a lookup table example.
        //                byte intensity = (byte)(depth >= minDepth && depth <= maxDepth ? depth : 0);

        //                // Write out blue byte
        //                this.depthcolorPixels[colorPixelIndex++] = intensity;

        //                // Write out green byte
        //                this.depthcolorPixels[colorPixelIndex++] = intensity;

        //                // Write out red byte
        //                this.depthcolorPixels[colorPixelIndex++] = intensity;

        //                // We're outputting BGR, the last byte in the 32 bits is unused so skip it
        //                // If we were outputting BGRA, we would write alpha here.
        //                ++colorPixelIndex;
        //            }

        //            // Write the pixel data into our bitmap
        //            this.depthBitmap.WritePixels(
        //                new Int32Rect(0, 0, this.depthBitmap.PixelWidth, this.depthBitmap.PixelHeight),
        //                this.depthcolorPixels,
        //                this.depthBitmap.PixelWidth * sizeof(int),
        //                0);
        //        }
        //    }

        //    ++_totalFrames;

        //    DateTime cur = DateTime.Now;
        //    if (cur.Subtract(_lastTime) > TimeSpan.FromSeconds(1))
        //    {
        //        int frameDiff = _totalFrames - _lastFrames;
        //        _lastFrames = _totalFrames;
        //        _lastTime = cur;
        //        frameRate.Text = frameDiff + " fps";
        //    }
        //}

        /// <summary>
        /// Draws indicators to show which edges are clipping skeleton data
        /// </summary>
        /// <param name="skeleton">skeleton to draw clipping information for</param>
        /// <param name="drawingContext">drawing context to draw to</param>
        //private static void RenderClippedEdges(Skeleton skeleton, DrawingContext drawingContext)
        //{
        //    if (skeleton.ClippedEdges.HasFlag(FrameEdges.Bottom))
        //    {
        //        drawingContext.DrawRectangle(
        //            Brushes.Red,
        //            null,
        //            new Rect(0, RenderHeight - ClipBoundsThickness, RenderWidth, ClipBoundsThickness));
        //    }

        //    if (skeleton.ClippedEdges.HasFlag(FrameEdges.Top))
        //    {
        //        drawingContext.DrawRectangle(
        //            Brushes.Red,
        //            null,
        //            new Rect(0, 0, RenderWidth, ClipBoundsThickness));
        //    }

        //    if (skeleton.ClippedEdges.HasFlag(FrameEdges.Left))
        //    {
        //        drawingContext.DrawRectangle(
        //            Brushes.Red,
        //            null,
        //            new Rect(0, 0, ClipBoundsThickness, RenderHeight));
        //    }

        //    if (skeleton.ClippedEdges.HasFlag(FrameEdges.Right))
        //    {
        //        drawingContext.DrawRectangle(
        //            Brushes.Red,
        //            null,
        //            new Rect(RenderWidth - ClipBoundsThickness, 0, ClipBoundsThickness, RenderHeight));
        //    }
        //}

        /// <summary>
        /// Event handler for Kinect sensor's SkeletonFrameReady event
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        //private void SensorSkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
        //{
        //    Skeleton[] skeletons = new Skeleton[0];

        //    using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
        //    {
        //        if (skeletonFrame != null)
        //        {
        //            skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
        //            skeletonFrame.CopySkeletonDataTo(skeletons);
        //        }
        //    }

        //    using (DrawingContext dc = this.skelDrawingGroup.Open())
        //    {
        //        // Draw a transparent background to set the render size
        //        dc.DrawRectangle(Brushes.Black, null, new Rect(0.0, 0.0, RenderWidth, RenderHeight));

        //        if (skeletons.Length != 0)
        //        {
        //            foreach (Skeleton skel in skeletons)
        //            {
        //                RenderClippedEdges(skel, dc);

        //                if (skel.TrackingState == SkeletonTrackingState.Tracked)
        //                {
        //                    this.DrawBonesAndJoints(skel, dc);
        //                }
        //                else if (skel.TrackingState == SkeletonTrackingState.PositionOnly)
        //                {
        //                    dc.DrawEllipse(
        //                    this.centerPointBrush,
        //                    null,
        //                    this.SkeletonPointToScreen(skel.Position),
        //                    BodyCenterThickness,
        //                    BodyCenterThickness);
        //                }
        //            }
        //        }

        //        // prevent drawing outside of our render area
        //        this.skelDrawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, RenderWidth, RenderHeight));
        //    }
        //}

        /// <summary>
        /// Draws a skeleton's bones and joints
        /// </summary>
        /// <param name="skeleton">skeleton to draw</param>
        /// <param name="drawingContext">drawing context to draw to</param>
        //private void DrawBonesAndJoints(Skeleton skeleton, DrawingContext drawingContext)
        //{
        //    // Render Torso
        //    this.DrawBone(skeleton, drawingContext, JointType.Head, JointType.ShoulderCenter);
        //    this.DrawBone(skeleton, drawingContext, JointType.ShoulderCenter, JointType.ShoulderLeft);
        //    this.DrawBone(skeleton, drawingContext, JointType.ShoulderCenter, JointType.ShoulderRight);
        //    this.DrawBone(skeleton, drawingContext, JointType.ShoulderCenter, JointType.Spine);
        //    this.DrawBone(skeleton, drawingContext, JointType.Spine, JointType.HipCenter);
        //    this.DrawBone(skeleton, drawingContext, JointType.HipCenter, JointType.HipLeft);
        //    this.DrawBone(skeleton, drawingContext, JointType.HipCenter, JointType.HipRight);

        //    // Left Arm
        //    this.DrawBone(skeleton, drawingContext, JointType.ShoulderLeft, JointType.ElbowLeft);
        //    this.DrawBone(skeleton, drawingContext, JointType.ElbowLeft, JointType.WristLeft);
        //    this.DrawBone(skeleton, drawingContext, JointType.WristLeft, JointType.HandLeft);

        //    // Right Arm
        //    this.DrawBone(skeleton, drawingContext, JointType.ShoulderRight, JointType.ElbowRight);
        //    this.DrawBone(skeleton, drawingContext, JointType.ElbowRight, JointType.WristRight);
        //    this.DrawBone(skeleton, drawingContext, JointType.WristRight, JointType.HandRight);

        //    // Left Leg
        //    this.DrawBone(skeleton, drawingContext, JointType.HipLeft, JointType.KneeLeft);
        //    this.DrawBone(skeleton, drawingContext, JointType.KneeLeft, JointType.AnkleLeft);
        //    this.DrawBone(skeleton, drawingContext, JointType.AnkleLeft, JointType.FootLeft);

        //    // Right Leg
        //    this.DrawBone(skeleton, drawingContext, JointType.HipRight, JointType.KneeRight);
        //    this.DrawBone(skeleton, drawingContext, JointType.KneeRight, JointType.AnkleRight);
        //    this.DrawBone(skeleton, drawingContext, JointType.AnkleRight, JointType.FootRight);

        //    // Render Joints
        //    foreach (Joint joint in skeleton.Joints)
        //    {
        //        Brush drawBrush = null;

        //        if (joint.TrackingState == JointTrackingState.Tracked)
        //        {
        //            drawBrush = this.trackedJointBrush;
        //        }
        //        else if (joint.TrackingState == JointTrackingState.Inferred)
        //        {
        //            drawBrush = this.inferredJointBrush;
        //        }

        //        if (drawBrush != null)
        //        {
        //            drawingContext.DrawEllipse(drawBrush, null, this.SkeletonPointToScreen(joint.Position), JointThickness, JointThickness);
        //        }
        //    }
        //}

        /// <summary>
        /// Maps a SkeletonPoint to lie within our render space and converts to Point
        /// </summary>
        /// <param name="skelpoint">point to map</param>
        /// <returns>mapped point</returns>
        //private Point SkeletonPointToScreen(SkeletonPoint skelpoint)
        //{
        //    // Convert point to depth space.
        //    // We are not using depth directly, but we do want the points in our 640x480 output resolution.
        //    DepthImagePoint depthPoint = this.sensor.CoordinateMapper.MapSkeletonPointToDepthPoint(skelpoint, DepthImageFormat.Resolution640x480Fps30);
        //    return new Point(depthPoint.X, depthPoint.Y);
        //}

        /// <summary>
        /// Draws a bone line between two joints
        /// </summary>
        /// <param name="skeleton">skeleton to draw bones from</param>
        /// <param name="drawingContext">drawing context to draw to</param>
        /// <param name="jointType0">joint to start drawing from</param>
        /// <param name="jointType1">joint to end drawing at</param>
        //private void DrawBone(Skeleton skeleton, DrawingContext drawingContext, JointType jointType0, JointType jointType1)
        //{
        //    Joint joint0 = skeleton.Joints[jointType0];
        //    Joint joint1 = skeleton.Joints[jointType1];

        //    // If we can't find either of these joints, exit
        //    if (joint0.TrackingState == JointTrackingState.NotTracked ||
        //        joint1.TrackingState == JointTrackingState.NotTracked)
        //    {
        //        return;
        //    }

        //    // Don't draw if both points are inferred
        //    if (joint0.TrackingState == JointTrackingState.Inferred &&
        //        joint1.TrackingState == JointTrackingState.Inferred)
        //    {
        //        return;
        //    }

        //    // We assume all drawn bones are inferred unless BOTH joints are tracked
        //    Pen drawPen = this.inferredBonePen;
        //    if (joint0.TrackingState == JointTrackingState.Tracked && joint1.TrackingState == JointTrackingState.Tracked)
        //    {
        //        drawPen = this.trackedBonePen;
        //    }

        //    drawingContext.DrawLine(drawPen, this.SkeletonPointToScreen(joint0.Position), this.SkeletonPointToScreen(joint1.Position));
        //}

        /// <summary>
        /// Called every time a video (RGB) frame is ready
        /// </summary>
        /// <param name="sender">The sender object</param>
        /// <param name="e">Image Frame Ready Event Args</param>
        //private void NuiColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
        //{
        //    // 32-bit per pixel, RGBA image
        //    /*
        //    ColorImageFrame image = e.OpenColorImageFrame();
        //    byte[] convertedImageFrame=new byte[image.PixelDataLength];
        //    image.CopyPixelDataTo(convertedImageFrame);
        //    videoImage.Source = BitmapSource.Create(
        //        image.Width, image.Height, 96, 96, PixelFormats.Bgr32, null, convertedImageFrame, image.Width * image.BytesPerPixel);
        //    */
        //    using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
        //    {
        //        if (colorFrame != null)
        //        {
        //            // Copy the pixel data from the image to a temporary array
        //            colorFrame.CopyPixelDataTo(this.colorPixels);

        //            // Write the pixel data into our bitmap
        //            // public void WritePixels(Int32Rect sourceRect, IntPtr sourceBuffer, int sourceBufferSize, int sourceBufferStride, int destinationX, int destinationY);
        //            this.colorBitmap.WritePixels(
        //                new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
        //                this.colorPixels,
        //                this.colorBitmap.PixelWidth * sizeof(int),
        //                0);
        //        }
        //    }
        //}

        /// <summary>
        /// Runs after the window is loaded
        /// </summary>
        /// <param name="sender">The sender object</param>
        /// <param name="e">Routed Event Args</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            this.sensor = KinectSensor.GetDefault();

            if (null != this.sensor)
            {
                // This event handler fires whenever the Kinect sensor's availability changes
                this.sensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

                // Open the sensor so frames can be read
                this.sensor.Open();

                // The status text reports whether the Kinect is available
                this.StatusText = this.sensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                          : Properties.Resources.NoSensorStatusText;

                // TODO: _coordinatemapper is needed elsewhere in the original program; disabled for now
                // _coordinatemapper = new CoordinateMapper(potentialSensor);
                /// New code: handling for the color image stream
                // open the reader for the color frames
                this.colorFrameReader = this.sensor.ColorFrameSource.OpenReader();
                // create the colorFrameDescription from the ColorFrameSource using Bgra format
                FrameDescription colorFrameDescription = this.sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
                // create the bitmap to display
                this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
                this.videoImage.Source = this.colorBitmap;
                // wire handler for frame arrival
                this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;

                //// Original code: handling for the color image stream
                //this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                //// Allocate space to put the pixels we'll receive
                //this.colorPixels = new byte[this.sensor.ColorStream.FramePixelDataLength];
                //// This is the bitmap we'll display on-screen
                //this.colorBitmap = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);
                //this.videoImage.Source = this.colorBitmap;
                //// If you want to see the RGB stream then include this
                //this.sensor.ColorFrameReady += this.NuiColorFrameReady;


                ////TODO Original code: handling for the depth image stream
                //this.sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
                //// Allocate space to put the depth pixels we'll receive
                //this.depthPixels = new DepthImagePixel[this.sensor.DepthStream.FramePixelDataLength];
                //this.depthcolorPixels = new byte[this.sensor.DepthStream.FramePixelDataLength * sizeof(int)];
                //// This is the bitmap we'll display on-screen
                //this.depthBitmap = new WriteableBitmap(this.sensor.DepthStream.FrameWidth, this.sensor.DepthStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);
                //// Set the image we display to point to the bitmap where we'll put the image data
                //this.depthImage.Source = this.depthBitmap;
                //// If you want to see the depth image and frames per second then include this
                //// I'mma turn this off 'cos my 'puter is proper slow
                //this.sensor.DepthFrameReady += this.NuiDepthFrameReady;


                /// New code: handling for the body (skeleton) stream
                // open the body frame reader to read the data frames from Kinect
                this.bodyFrameReader = this.sensor.BodyFrameSource.OpenReader();

                // Initialize the body viewer object to display tracked bodies in the presentation layer
                this.kinectBodyView = new BodyView(this.sensor);
                // set our data context objects for display in UI
                this.DataContext = this;
                this.kinectBodyViewbox.DataContext = this.kinectBodyView;

                // These event handlers fire each time a new body frame arrives
                this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;
                this.bodyFrameReader.FrameArrived += SkeletonExtractSkeletonFrameReady;
                Skeleton2DDataExtract.Skeleton2DdataCoordReady += this.NuiSkeleton2DdataCoordReady;

                ////TODO Original code: handling for the skeleton stream
                //this.sensor.SkeletonStream.Enable();
                //// Create the drawing group we'll use for drawing
                //this.skelDrawingGroup = new DrawingGroup();

                //// Create an image source that we can use in our image control
                //this.skelImageSource = new DrawingImage(this.skelDrawingGroup);

                //// Display the drawing using our image control
                //skeletonImage.Source = this.skelImageSource;
                //// Track Seated User (Change to SkeletonTrackingMode.Default if not seated)
                //this.sensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
                //this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;
                //this.sensor.SkeletonFrameReady += SkeletonExtractSkeletonFrameReady;
                //Skeleton2DDataExtract.Skeleton2DdataCoordReady += this.NuiSkeleton2DdataCoordReady;
            }

            _lastTime = DateTime.Now;

            _dtw   = new DtwGestureRecognizer(12, 0.6, 2, 2, 10);
            _video = new ArrayList();

            // Update the debug window with Sequences information
            dtwTextOutput.Text = _dtw.RetrieveText();

            Debug.WriteLine("Finished Window Loading");
        }
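This Kinect v2 version wires bodyFrameReader.FrameArrived to SkeletonExtractSkeletonFrameReady, yet only the v1 form of that method (taking SkeletonFrameReadyEventArgs) appears, commented out, earlier in the file. A v2 sketch of what the handler would need to look like; the Skeleton2DDataExtract.ProcessData overload accepting a Body is an assumption:

        private void SkeletonExtractSkeletonFrameReady(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }

                Body[] bodies = new Body[bodyFrame.BodyCount];
                bodyFrame.GetAndRefreshBodyData(bodies);

                foreach (Body body in bodies)
                {
                    if (body != null && body.IsTracked)
                    {
                        // Hypothetical ProcessData overload accepting a v2 Body
                        Skeleton2DDataExtract.ProcessData(body);
                    }
                }
            }
        }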