Beispiel #1
0
		/// <summary>
		/// Handles a depth frame: shows the raw depth bitmap, the distance at the
		/// frame's mid point, a min-distance-highlighted bitmap, and optionally
		/// saves one frame to disk.
		/// </summary>
		void RuntimeDepthFrameReady(AllFramesReadyEventArgs e)
		{
			using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
			{
				if (depthFrame == null)
				{
					return;
				}

				// Turn raw data into an array of distances.
				var depthArray = depthFrame.ToDepthArray();

				// Show the plain depth image.
				DepthImage.Image = depthFrame.ToBitmap();

				// Distance at the mid point of the frame.
				MidPointDistanceViaGetDistanceText.Text = depthFrame.GetDistance(depthFrame.Width / 2, depthFrame.Height / 2).ToString();

				// Depth image with pixels nearer than _minDistance highlighted in red.
				DepthImageWithMinDistance.Image = depthArray.ToBitmap(depthFrame.Width, depthFrame.Height, _minDistance, Color.FromArgb(255, 255, 0, 0));

				if (_saveDepthFrame)
				{
					_saveDepthFrame = false;

					// FIX: Bitmap is IDisposable — dispose the temporary bitmap after
					// saving instead of leaking a GDI handle for every saved frame.
					using (var snapshot = depthFrame.ToBitmap())
					{
						snapshot.Save(DateTime.Now.ToString("yyyyMMddHHmmss") + "_depth.jpg", ImageFormat.Jpeg);
					}
				}
			}
		}
        /// <summary>
        /// Displays the infrared image (delivered via the color stream) and a
        /// bitwise-inverted 16-bit grayscale rendering of the depth data.
        /// </summary>
        void kinect_AllFramesReady( object sender, AllFramesReadyEventArgs e )
        {
            // Infrared image.
            using ( ColorImageFrame colorFrame = e.OpenColorImageFrame() ) {
                if ( colorFrame != null ) {
                    // Copy the raw infrared pixel data.
                    var infraredPixels = new byte[colorFrame.PixelDataLength];
                    colorFrame.CopyPixelDataTo( infraredPixels );

                    // Render as 16-bit grayscale.
                    imageInfrared.Source = BitmapSource.Create( colorFrame.Width, colorFrame.Height,
                        96, 96, PixelFormats.Gray16, null, infraredPixels,
                        colorFrame.Width * colorFrame.BytesPerPixel );
                }
            }

            // Depth data.
            using ( DepthImageFrame depthFrame = e.OpenDepthImageFrame() ) {
                if ( depthFrame != null ) {
                    // Bitwise-invert every sample so the depth becomes a visible image.
                    var depthPixels = new short[depthFrame.PixelDataLength];
                    depthFrame.CopyPixelDataTo( depthPixels );

                    for ( int index = 0; index < depthPixels.Length; index++ ) {
                        depthPixels[index] = (short)~depthPixels[index];
                    }

                    imageDepth.Source = BitmapSource.Create( depthFrame.Width, depthFrame.Height,
                        96, 96, PixelFormats.Gray16, null, depthPixels,
                        depthFrame.Width * depthFrame.BytesPerPixel );
                }
            }
        }
        /// <summary>
        /// Maps the head and hand joints of a skeleton to color-image coordinates
        /// via the sensor's CoordinateMapper and positions the matching ellipses.
        /// </summary>
        void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depth = e.OpenDepthImageFrame())
            {
                if (depth == null || _sensor == null)
                {
                    return;
                }

                const DepthImageFormat depthFormat = DepthImageFormat.Resolution640x480Fps30;
                const ColorImageFormat colorFormat = ColorImageFormat.RgbResolution640x480Fps30;
                var mapper = _sensor.CoordinateMapper;

                // Skeleton space -> depth space.
                DepthImagePoint headDepth = mapper.MapSkeletonPointToDepthPoint(
                    first.Joints[JointType.Head].Position, depthFormat);
                DepthImagePoint leftDepth = mapper.MapSkeletonPointToDepthPoint(
                    first.Joints[JointType.HandLeft].Position, depthFormat);
                DepthImagePoint rightDepth = mapper.MapSkeletonPointToDepthPoint(
                    first.Joints[JointType.HandRight].Position, depthFormat);

                // Depth space -> color space.
                ColorImagePoint headColor = mapper.MapDepthPointToColorPoint(depthFormat, headDepth, colorFormat);
                ColorImagePoint leftColor = mapper.MapDepthPointToColorPoint(depthFormat, leftDepth, colorFormat);
                ColorImagePoint rightColor = mapper.MapDepthPointToColorPoint(depthFormat, rightDepth, colorFormat);

                CameraPosition(ellipseHead, headColor);
                CameraPosition(ellipseLeft, leftColor);
                CameraPosition(ellipseRight, rightColor);
            }
        }
        /// <summary>
        /// Displays the RGB camera image and feeds depth and skeleton data into the
        /// interaction stream.
        /// </summary>
        void kinect_AllFramesReady( object sender, AllFramesReadyEventArgs e )
        {
            using ( var colorFrame = e.OpenColorImageFrame() ) {
                if ( colorFrame != null ) {
                    var colorPixels = new byte[colorFrame.PixelDataLength];
                    colorFrame.CopyPixelDataTo( colorPixels );

                    ImageRgb.Source = BitmapSource.Create( colorFrame.Width, colorFrame.Height, 96, 96,
                        PixelFormats.Bgr32, null, colorPixels, colorFrame.Width * 4 );
                }
            }

            using ( var depthFrame = e.OpenDepthImageFrame() ) {
                if ( depthFrame != null ) {
                    // Push depth data into the interaction stream.
                    // GetRawPixelData() is an extension method provided by the interaction library.
                    stream.ProcessDepth( depthFrame.GetRawPixelData(), depthFrame.Timestamp );
                }
            }

            using ( var skeletonFrame = e.OpenSkeletonFrame() ) {
                if ( skeletonFrame != null ) {
                    var skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    skeletonFrame.CopySkeletonDataTo( skeletonData );

                    // Push skeleton data, together with the current accelerometer reading.
                    stream.ProcessSkeleton( skeletonData, kinect.AccelerometerGetCurrentReading(), skeletonFrame.Timestamp );
                }
            }
        }
Beispiel #5
0
        /// <summary>
        /// Maps the left-hand joint to a color-image point, moves the arrow element
        /// there, and runs swipe detection on its X coordinate.
        /// </summary>
        void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depth = e.OpenDepthImageFrame())
            {
                if (depth == null || kinectSensorChooser1.Kinect == null)
                {
                    return;
                }

                // Skeleton space -> depth-map coordinates for the left hand.
                DepthImagePoint leftHandDepth =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.HandLeft].Position);

                // Depth-map -> color-image coordinates.
                ColorImagePoint leftHandColor = depth.MapToColorImagePoint(
                    leftHandDepth.X, leftHandDepth.Y, ColorImageFormat.RgbResolution640x480Fps30);

                CameraPosition(arrow, leftHandColor);
                SwipeCheck(leftHandColor.X);
            }
        }
        bool working = false; // Skip frames if we're still processing stuff.

        /// <summary>
        /// Calibration-phase frame handler: looks for the current calibration code in
        /// the color/depth frames, advances to the next code, and on the final code
        /// computes the calibration coefficients and hands control over to the
        /// regular kinectController handler.
        /// </summary>
        public void CalibrationAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            if (working) return;
            working = true;

            // FIX: reset `working` in a finally block. Previously an exception thrown
            // anywhere below left `working` stuck at true, permanently disabling this
            // handler (every subsequent frame would be skipped).
            try
            {
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                {
                    if (colorFrame == null)
                    {
                        return;
                    }

                    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                    {
                        if (depthFrame == null)
                        {
                            return;
                        }

                        int code_num = find_touch(colorFrame, depthFrame);
                        if (code_num >= 0)
                        {
                            // Make the next code visible.
                            if (code_num < 4)
                            {
                                codes[code_num].Visibility = Visibility.Hidden;
                                codes[code_num + 1].Visibility = Visibility.Visible;
                                next_code_num++;
                                // NOTE(review): Thread.Sleep blocks the event thread for 3 s;
                                // consider a timer or async delay instead.
                                Thread.Sleep(3000);
                            }
                            else
                            {
                                Thread.Sleep(3000);
                                // We are done. Calculate the coefficients.
                                sensor.AllFramesReady -= this.CalibrationAllFramesReady;
                                codes[4].Visibility = Visibility.Hidden;
                                kinectController.calibration_coefficients = get_calibration_coeffs();

                                Point center_top_left = code_points[0];
                                Point center_bot_right = code_points[4];
                                kinectController.Calibrate((int)(center_top_left.X + 1.25 * code_size.X), (int)(center_top_left.Y + 0.7 * code_size.Y), (int)(center_bot_right.X - 1.25 * code_size.X), (int)(center_bot_right.Y - 0.8 * code_size.Y));
                                sensor.AllFramesReady += kinectController.SensorAllFramesReady;
                                CalibrationDidComplete();
                            }
                        }
                    }
                }
            }
            finally
            {
                working = false;
            }
        }
 /// <summary>
 /// Renders the RGB camera image, optionally applying depth-based distance
 /// recognition first when the grayscale checkbox is ticked.
 /// </summary>
 private void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     // NOTE(review): the frames returned by OpenColorImageFrame()/OpenDepthImageFrame()
     // are IDisposable but are not disposed here — presumably the helper methods take
     // ownership; verify, otherwise wrap them in using blocks.
     byte[] imagem = ObterImagemSensorRGB(e.OpenColorImageFrame());
     if (chkEscalaCinza.IsChecked.HasValue && chkEscalaCinza.IsChecked.Value)
         ReconhecerDistancia(e.OpenDepthImageFrame(), imagem, 2000);
     if (imagem != null)
         imagemCamera.Source = BitmapSource.Create(kinect.ColorStream.FrameWidth, kinect.ColorStream.FrameHeight, 96, 96, PixelFormats.Bgr32, null, imagem, kinect.ColorStream.FrameBytesPerPixel * kinect.ColorStream.FrameWidth);
 }
        /// <summary>
        /// Manage frames of the Kinect sensor according to the services activated.
        /// Skeleton frames are always processed; color and depth frames only when
        /// the corresponding services/features are enabled.
        /// </summary>
        /// <param name="e">Event args exposing the skeleton/color/depth frames.</param>
        private void ManageAllFrame(AllFramesReadyEventArgs e)
        {
            if (!IsRunning)
            {
                return;
            }

            // SkeletonTracking Frame Manager
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                try
                {
                    ManageSkeletonFrame(skeletonFrame);
                }
                catch (Exception ex)
                {
                    // Just log the error
                    Console.Error.WriteLine("Error with skeleton frame : " + ex.Message + " _ " + ex.StackTrace);
                }
            }

            // Color Frame Manager
            if (PropertiesPluginKinect.Instance.EnableColorFrameService)
            {
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                {
                    try
                    {
                        ManageColorFrame(colorFrame);
                    }
                    catch (Exception ex)
                    {
                        // Just log the error
                        Console.Error.WriteLine("Error with color frame : " + ex.Message + " _ " + ex.StackTrace);
                    }
                }
            }

            // Depth Frame Manager — needed by the depth service, pointing mode, or gesture grip.
            bool depthNeeded = PropertiesPluginKinect.Instance.EnableDepthFrameService
                || PropertiesPluginKinect.Instance.KinectPointingModeEnabled
                || PropertiesPluginKinect.Instance.EnableGestureGrip;

            if (depthNeeded)
            {
                using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                {
                    try
                    {
                        ManageDepthFrame(depthFrame);
                    }
                    catch (Exception ex)
                    {
                        // Just log the error
                        Console.Error.WriteLine("Error with depth frame : " + ex.Message + " _ " + ex.StackTrace);
                    }
                }
            }
        }
        /// <summary>
        /// Maps head, hand, foot and hip joints from skeleton space to depth-map
        /// coordinates and then to color-image coordinates.
        /// NOTE(review): the resulting color points are never consumed in this method —
        /// verify whether follow-up positioning code was removed intentionally.
        /// </summary>
        void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depth = e.OpenDepthImageFrame())
            {
                if (depth == null ||
                    _sensor == null)
                {
                    return;
                }

                //Map a joint location to a point on the depth map
                //head
                DepthImagePoint headDepthPoint =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.Head].Position);
                //left hand
                DepthImagePoint leftHandDepthPoint =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.HandLeft].Position);
                //right hand
                DepthImagePoint rightHandDepthPoint =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.HandRight].Position);
                //left foot
                // FIX: was JointType.HandRight (copy-paste error) — use the left foot joint.
                DepthImagePoint leftFootDepthPoint =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.FootLeft].Position);
                //right foot
                // FIX: was JointType.HandRight (copy-paste error) — use the right foot joint.
                DepthImagePoint rightFootDepthPoint =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.FootRight].Position);
                //hip
                DepthImagePoint hipDepthPoint =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.HipCenter].Position);

                //Map a depth point to a point on the color image
                //head
                ColorImagePoint headColorPoint =
                    depth.MapToColorImagePoint(headDepthPoint.X, headDepthPoint.Y,
                    ColorImageFormat.RgbResolution640x480Fps30);
                //left hand
                ColorImagePoint leftHandColorPoint =
                    depth.MapToColorImagePoint(leftHandDepthPoint.X, leftHandDepthPoint.Y,
                    ColorImageFormat.RgbResolution640x480Fps30);
                //right hand
                ColorImagePoint rightHandColorPoint =
                    depth.MapToColorImagePoint(rightHandDepthPoint.X, rightHandDepthPoint.Y,
                    ColorImageFormat.RgbResolution640x480Fps30);
                //left foot
                ColorImagePoint leftFootColorPoint =
                    depth.MapToColorImagePoint(leftFootDepthPoint.X, leftFootDepthPoint.Y,
                    ColorImageFormat.RgbResolution640x480Fps30);
                //right foot
                ColorImagePoint rightFootColorPoint =
                    depth.MapToColorImagePoint(rightFootDepthPoint.X, rightFootDepthPoint.Y,
                    ColorImageFormat.RgbResolution640x480Fps30);
                //hip
                ColorImagePoint hipColorPoint =
                    depth.MapToColorImagePoint(hipDepthPoint.X, hipDepthPoint.Y,
                    ColorImageFormat.RgbResolution640x480Fps30);
            }
        }
 // Receives the update notification for all frame types.
 void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     // Both frames are IDisposable; stack the using statements.
     using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
     using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
     {
         RenderScreen(colorFrame, depthFrame);
     }
 }
 /// <summary>
 /// Every 4th frame: finds the bounding box of player pixels in the 320x240
 /// depth image, samples it on a 16x16 grid, and sends each row as two bytes
 /// (MSB first) over the serial port.
 /// </summary>
 private void Kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     if (cnt++ < 3) return;
     cnt = 0;
     using (var f = e.OpenDepthImageFrame())
     {
         // FIX: OpenDepthImageFrame() can return null (no new frame available);
         // previously f was dereferenced unconditionally, throwing a NullReferenceException.
         if (f == null) return;

         var pd = f.GetRawPixelData();

         // Bounding box of the pixels that belong to a tracked player.
         int mx = 1000, my = 1000, Mx = 0, My = 0;
         for (int i = 0; i < 320; i++)
         {
             for (int j = 0; j < 240; j++)
             {
                 if (get(pd, i, j).PlayerIndex > 0)
                 {
                     if (i < mx) mx = i;
                     if (i > Mx) Mx = i;
                     if (j < my) my = j;
                     if (j > My) My = j;
                 }
             }
         }

         if (mx < 1000) // at least one player pixel was found
         {
             // FIX: was integer division ((Mx - mx) / 16), truncating to whole pixels
             // before the float assignment; divide as float so the grid spacing is exact.
             float wc = (Mx - mx) / 16f;
             float hc = (My - my) / 16f;
             if (keep_proportions)
             {
                 // Use the larger cell size on both axes to preserve the aspect ratio.
                 if (hc < wc) hc = wc;
                 else wc = hc;
             }
             Console.WriteLine("mx={0},Mx={1},my={2},My={3}", mx, Mx, my, My);
             for (int j = 0; j < 16; j++)
             {
                 // Pack each 16-sample row into two bytes.
                 int b1 = 0;
                 for (int i = 0; i < 8; i++)
                 {
                     b1 = b1 * 2 + ((get(pd, mx + (int)(wc * i), my + (int)(hc * j)).PlayerIndex > 0) ? 1 : 0);
                 }
                 int b2 = 0;
                 for (int i = 8; i < 16; i++)
                 {
                     b2 = b2 * 2 + ((get(pd, mx + (int)(wc * i), my + (int)(hc * j)).PlayerIndex > 0) ? 1 : 0);
                 }
                 byte[] x = new byte[2];
                 x[0] = (byte)b1;
                 x[1] = (byte)b2;
                 COM.Write(x, 0, 2);
                 Console.WriteLine("Sending {0},{1}", b1, b2);
             }
         }
     }
 }
        /// <summary>
        /// Tracks the left hand and head in depth coordinates. Only active while the
        /// user's head is 1.7–2.0 m from the sensor; within that zone, a left hand
        /// above head level triggers wave detection (used to quit the system).
        /// </summary>
        private void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame == null || _sensor == null)
                    return;

                //left hand point information
                DepthImagePoint LeftHandDepthPoint = depthFrame.MapFromSkeletonPoint(first.Joints[JointType.HandLeft].Position);
                LeftHandPoint newLeftHandPoint = new LeftHandPoint()
                {
                    X = LeftHandDepthPoint.X,
                    Y = LeftHandDepthPoint.Y,
                    Z = LeftHandDepthPoint.Depth,
                    T = DateTime.Now
                };

                DepthImagePoint HeadDepthPoint = depthFrame.MapFromSkeletonPoint(first.Joints[JointType.Head].Position);
                HeadPoint newHeadPoint = new HeadPoint()
                {
                    X = HeadDepthPoint.X,
                    Y = HeadDepthPoint.Y,
                    Z = HeadDepthPoint.Depth,
                    T = DateTime.Now
                };

                // The user must stand in a box area in front of the Kinect
                // (head 1700–2000 mm away) before anything starts.
                if (newHeadPoint.Z < 1700 || newHeadPoint.Z > 2000)
                {
                    StatusLabel.Visibility = System.Windows.Visibility.Hidden;
                    StatusLabel.Content = "";
                    return;
                }

                StatusLabel.Visibility = System.Windows.Visibility.Visible;
                // FIX: Z is an integer depth in millimetres; dividing by 1000 used integer
                // division, so the label always displayed a whole number of metres
                // (e.g. "1m" for the entire 1.7–2.0 m range). Divide by 1000.0 instead.
                StatusLabel.Content = "Control Mode(1.7m~2m): " + newHeadPoint.Z / 1000.0 + "m";

                // Left hand above head level (smaller Y) — run wave detection to quit.
                if (newLeftHandPoint.Y < newHeadPoint.Y)
                {
                    LeftHandWave(newLeftHandPoint, newHeadPoint);
                }
                else
                {
                    IsLeftHandWave = false;
                }
            }
        }
        /// <summary>
        /// Renders the RGB camera image as the canvas background, applies depth
        /// processing, and redraws the user's skeleton.
        /// </summary>
        private void Kinect_AllFramesReady(object sender, AllFramesReadyEventArgs allFrameEvent)
        {
            // NOTE(review): the frames returned by OpenColorImageFrame()/OpenDepthImageFrame()/
            // OpenSkeletonFrame() are IDisposable but are not disposed here — presumably the
            // helper methods take ownership; verify, otherwise wrap them in using blocks.
            byte[] imagem = ObterImagemSensorRGB(allFrameEvent.OpenColorImageFrame());

            FuncoesProfundidade(allFrameEvent.OpenDepthImageFrame(), imagem, 2000);
            if (imagem != null)
            {
                 canvasKinect.Background = new ImageBrush(BitmapSource.Create(Kinect.ColorStream.FrameWidth,
                               Kinect.ColorStream.FrameHeight,
                               96, 96, PixelFormats.Bgr32, null,
                               imagem, Kinect.ColorStream.FrameBytesPerPixel * Kinect.ColorStream.FrameWidth));
            }

            // Clear the overlay before redrawing the skeleton.
            canvasKinect.Children.Clear();
            FuncoesEsqueletoUsuario(allFrameEvent.OpenSkeletonFrame());
        }
Beispiel #14
0
        /// <summary>
        /// Converts each depth frame to a colored BGR32 bitmap and displays it.
        /// </summary>
        void newSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                byte[] coloredPixels = GenerateColoredBytes(depthFrame);
                int bytesPerRow = depthFrame.Width * 4; // Bgr32 = 4 bytes per pixel

                image1.Source = BitmapSource.Create(
                    depthFrame.Width, depthFrame.Height, 96, 96,
                    PixelFormats.Bgr32, null, coloredPixels, bytesPerRow);
            }
        }
        /// <summary>
        /// Renders the RGB camera image as the canvas background, optionally applies
        /// grayscale distance recognition, and redraws the user's skeleton.
        /// </summary>
        private void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // NOTE(review): the frames returned by OpenColorImageFrame()/OpenDepthImageFrame()/
            // OpenSkeletonFrame() are IDisposable but are not disposed here — presumably the
            // helper methods take ownership; verify, otherwise wrap them in using blocks.
            byte[] imagem =  ObterImagemSensorRGB(e.OpenColorImageFrame());

            if( chkEscalaCinza.IsChecked.HasValue && chkEscalaCinza.IsChecked.Value)
                ReconhecerDistancia(e.OpenDepthImageFrame(),imagem, 2000);

            if (imagem != null)
                canvasKinect.Background = new ImageBrush(BitmapSource.Create(kinect.ColorStream.FrameWidth, kinect.ColorStream.FrameHeight,
                                    96, 96, PixelFormats.Bgr32, null, imagem,
                                    kinect.ColorStream.FrameWidth * kinect.ColorStream.FrameBytesPerPixel));

            // Clear the overlay before redrawing the skeleton.
            canvasKinect.Children.Clear();
            DesenharEsqueletoUsuario(e.OpenSkeletonFrame());
            
        }
        // Receives the update notification for all frame types.
        void kinect_AllFramesReady( object sender, AllFramesReadyEventArgs e )
        {
            // Both frame types are IDisposable, hence the using blocks.
            using ( var colorFrame = e.OpenColorImageFrame() ) {
                if ( colorFrame != null )
                    imageRgbCamera.Source = colorFrame.ToBitmapSource();
            }

            using ( var depthFrame = e.OpenDepthImageFrame() ) {
                if ( depthFrame != null )
                    imageDepthCamera.Source = depthFrame.ToBitmapSource();
            }
        }
        /// <summary>
        /// Handles the Kinect AllFramesReady event: copies the skeleton data, picks
        /// (or keeps) a skeleton to track, and raises FrameDataUpdated with the
        /// color/depth frames and that skeleton.
        /// </summary>
        private void Sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            ColorImageFrame colorFrame = null;
            DepthImageFrame depthFrame = null;
            Skeleton[] skeletonData;

            try
            {
                colorFrame = e.OpenColorImageFrame();
                depthFrame = e.OpenDepthImageFrame();

                using (var skeletonFrame = e.OpenSkeletonFrame())
                {
                    // All three frames must be present to publish an update.
                    if (colorFrame == null || depthFrame == null || skeletonFrame == null)
                        return;

                    skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    skeletonFrame.CopySkeletonDataTo(skeletonData);
                }

                // Find a skeleton to track.
                // First see if our old one is good.
                // When a skeleton is in PositionOnly tracking state, don't pick a new one
                // as it may become fully tracked again.
                Skeleton skeletonOfInterest = skeletonData.FirstOrDefault(s => s.TrackingId == this.trackedSkeletonId && s.TrackingState != SkeletonTrackingState.NotTracked);

                if (skeletonOfInterest == null)
                {
                    // Old one wasn't around.  Find any skeleton that is being tracked and use it.
                    skeletonOfInterest = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);

                    if (skeletonOfInterest != null)
                        this.trackedSkeletonId = skeletonOfInterest.TrackingId;
                }

                // NOTE(review): FrameData receives colorFrame/depthFrame, which are disposed
                // in the finally block below — subscribers must consume them synchronously;
                // verify FrameData does not retain the frames past this call.
                if (this.FrameDataUpdated != null)
                    this.FrameDataUpdated(this, new FrameData(colorFrame, depthFrame, skeletonOfInterest));
            }
            finally
            {
                // Frames are IDisposable; release them whether or not an update was published.
                if (colorFrame != null)
                    colorFrame.Dispose();

                if (depthFrame != null)
                    depthFrame.Dispose();
            }
        }
        /// <summary>
        /// Maps head and hand joints to color-image coordinates and positions the
        /// matching UI elements; `flag` selects which head image is moved.
        /// </summary>
        void GetCameraPoint(Skeleton first, bool flag,  AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depth = e.OpenDepthImageFrame())
            {
                if (depth == null || kinectSensorChooser1.Kinect == null)
                {
                    return;
                }

                const ColorImageFormat colorFormat = ColorImageFormat.RgbResolution640x480Fps30;

                // Skeleton space -> depth map.
                DepthImagePoint headDepth = depth.MapFromSkeletonPoint(first.Joints[JointType.Head].Position);
                DepthImagePoint leftDepth = depth.MapFromSkeletonPoint(first.Joints[JointType.HandLeft].Position);
                DepthImagePoint rightDepth = depth.MapFromSkeletonPoint(first.Joints[JointType.HandRight].Position);

                // Depth map -> color image.
                ColorImagePoint headColor = depth.MapToColorImagePoint(headDepth.X, headDepth.Y, colorFormat);
                ColorImagePoint leftColor = depth.MapToColorImagePoint(leftDepth.X, leftDepth.Y, colorFormat);
                ColorImagePoint rightColor = depth.MapToColorImagePoint(rightDepth.X, rightDepth.Y, colorFormat);

                // Position the UI elements; flag chooses the head image.
                if (flag)
                {
                    CameraPosition(headImage, headColor);
                }
                else
                {
                    CameraPosition(headImage2, headColor);
                }
                CameraPosition(leftEllipse, leftColor);
                CameraPosition(rightEllipse, rightColor);
            }
        }
        /// <summary>
        /// Maps head and hand joints to depth-map and then color-image coordinates.
        /// The resulting color points are currently unused — the CameraPosition calls
        /// are commented out — so this only exercises the coordinate mapping.
        /// </summary>
        public void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depth = e.OpenDepthImageFrame())
            {
                if (depth == null || KinectSensor.Kinect == null)
                {
                    return;
                }

                const ColorImageFormat colorFormat = ColorImageFormat.RgbResolution640x480Fps30;

                // Skeleton space -> depth map.
                DepthImagePoint headDepth = depth.MapFromSkeletonPoint(first.Joints[JointType.Head].Position);
                DepthImagePoint leftDepth = depth.MapFromSkeletonPoint(first.Joints[JointType.HandLeft].Position);
                DepthImagePoint rightDepth = depth.MapFromSkeletonPoint(first.Joints[JointType.HandRight].Position);

                // Depth map -> color image.
                ColorImagePoint headColor = depth.MapToColorImagePoint(headDepth.X, headDepth.Y, colorFormat);
                ColorImagePoint leftColor = depth.MapToColorImagePoint(leftDepth.X, leftDepth.Y, colorFormat);
                ColorImagePoint rightColor = depth.MapToColorImagePoint(rightDepth.X, rightDepth.Y, colorFormat);

                //Set location (currently disabled)
                //CameraPosition(Head, headColor);
                //CameraPosition(LeftHand, leftColor);
                //CameraPosition(RightHand, rightColor);
            }
        }
        /// <summary>
        /// Handles depth and color frames, then captures the current color bitmap as
        /// a 320x240 PNG and stores it as a base64 string for later streaming.
        /// </summary>
        public void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            bool depthReceived = false;
            bool colorReceived = false;

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (null != depthFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    depthFrame.CopyDepthImagePixelDataTo(depthPixels);
                    depthReceived = true;
                }
            }

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (null != colorFrame)
                {
                    // Done by WSRKinectMacro
                    colorReceived = true;
                }
            }

            if (depthReceived)
            {
                HandleDepth();
            }

            if (colorReceived)
            {
                HandleColor();
            }

            WSRKinect wsr = (WSRKinect)WSRConfig.GetInstance().GetWSRMicro();
            WriteableBitmap resize = colorBitmap.Resize(320, 240, WriteableBitmapExtensions.Interpolation.Bilinear);

            // FIX: dispose the MemoryStream and dispose the Bitmap even when Save throws.
            // Previously the stream was never disposed and an exception leaked the image.
            using (Bitmap image = wsr.GetColorPNG(resize, false))
            using (MemoryStream ms = new MemoryStream())
            {
                image.Save(ms, format);
                byte[] imgByte = ms.ToArray();
                base64String = Convert.ToBase64String(imgByte);
            }
        }
Beispiel #21
0
        /// <summary>
        /// Applies any pending background/color changes, then parses the depth frame.
        /// </summary>
        public void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Apply a pending background change, if any.
            if (!drawController.backgroundAlreadySet)
            {
                drawController.ChangeBackground(drawController.background);
            }

            // Apply a pending color change, if any (-1 means "no change requested").
            if (drawController.shouldChangeColor != -1)
            {
                drawController.ChangeColor((Colors)drawController.shouldChangeColor);
            }

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    this.ParseDepthFrame(depthFrame);
                }
            }
        }
        /// <summary>
        /// Displays the color frame in the debug image and samples the depth at a
        /// fixed pixel (row 200, column 300) to update the touch threshold.
        /// </summary>
        public void DisplayColorImageAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null) return;

                byte[] pixels = new byte[colorFrame.PixelDataLength];
                colorFrame.CopyPixelDataTo(pixels);
                int stride = colorFrame.Width * 4;
                debugImage.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, pixels, stride);

                using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                {
                    // FIX: OpenDepthImageFrame() can return null (no new depth frame);
                    // previously depthFrame was dereferenced unconditionally, throwing
                    // a NullReferenceException.
                    if (depthFrame == null) return;

                    short[] rawDepthData = new short[depthFrame.PixelDataLength];
                    depthFrame.CopyPixelDataTo(rawDepthData);

                    // Shift out the player-index bits to get the raw depth value.
                    int depth = rawDepthData[200 * depthFrame.Width + 300] >> DepthImageFrame.PlayerIndexBitmaskWidth;
                    threshold = depth;
                }
            }
        }
        /// <summary>
        /// Gets head/hand positions from the Kinect, moves the hand cursors, and
        /// sends the right-hand color-image coordinates to sendData.
        /// </summary>
        void getCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
        {
            // Gets the data from the Kinect and sends it to the sendData method
            using (DepthImageFrame depth = e.OpenDepthImageFrame())
            {
                if (depth == null || kinectSensorChooser1.Kinect == null)
                {
                    return;
                }

                DepthImagePoint headDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.Head].Position);
                DepthImagePoint leftDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.HandLeft].Position);
                // FIX: was JointType.Head (copy-paste error) — this point drives the
                // right-hand cursor and the coordinates sent below, so use HandRight.
                DepthImagePoint rightDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.HandRight].Position);

                ColorImagePoint headColorPoint = depth.MapToColorImagePoint(headDepthPoint.X, headDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                ColorImagePoint leftColorPoint = depth.MapToColorImagePoint(leftDepthPoint.X, leftDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                ColorImagePoint rightColorPoint = depth.MapToColorImagePoint(rightDepthPoint.X, rightDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);

                CameraPosition(leftHand, leftColorPoint);
                CameraPosition(rightHand, rightColorPoint);

                sendData(rightColorPoint.X, rightColorPoint.Y);
            }
        }
Beispiel #24
0
        /// <summary>
        /// Processes a synchronized color/depth/skeleton frame set: builds the color
        /// and depth textures and collects a colored 2D point for every joint of
        /// every tracked skeleton (red/blue for the two tracked players, hot pink
        /// otherwise).
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Frame-set event args for this Kinect tick.</param>
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Color
            using (var frame = e.OpenColorImageFrame())
            {
                //Get raw image
                if (frame != null)
                {
                    //Create array for pixel data and copy it from the image frame
                    Byte[] pixelData = new Byte[frame.PixelDataLength];
                    frame.CopyPixelDataTo(pixelData);

                    //Convert RGBA to BGRA
                    Byte[] bgraPixelData = new Byte[frame.PixelDataLength];
                    for (int i = 0; i < pixelData.Length; i += 4)
                    {
                        bgraPixelData[i]     = pixelData[i + 2];
                        bgraPixelData[i + 1] = pixelData[i + 1];
                        bgraPixelData[i + 2] = pixelData[i];
                        bgraPixelData[i + 3] = (Byte)255; //The video comes with 0 alpha so it is transparent
                    }

                    // Create a texture and assign the realigned pixels
                    colorVideo = new Texture2D(game.GraphicsDevice, frame.Width, frame.Height);
                    colorVideo.SetData(bgraPixelData);
                }
            }

            // Depth
            using (var frame = e.OpenDepthImageFrame())
            {
                if (frame != null)
                {
                    // FIX: the buffer was previously allocated twice in a row;
                    // a single allocation is sufficient.
                    short[] pixelData = new short[frame.PixelDataLength];
                    frame.CopyPixelDataTo(pixelData);

                    depthVideo = new Texture2D(game.GraphicsDevice, frame.Width, frame.Height);

                    depthVideo.SetData(ConvertDepthFrame(pixelData, kinectSensor.DepthStream));
                }
            }

            // Skeleton
            using (var frame = e.OpenSkeletonFrame())
            {
                if (frame != null)
                {
                    skeletonPoints = new List <ColoredPoint>();
                    frame.CopySkeletonDataTo(this.skeletons);

                    //refresh the closest skeletons
                    this.TrackClosestSkeleton();

                    foreach (var body in skeletons)
                    {
                        if (body != null && body.TrackingState == SkeletonTrackingState.Tracked)
                        {
                            foreach (Joint joint in body.Joints)
                            {
                                SkeletonPoint skeletonPoint = joint.Position;

                                // 2D coordinates in pixels
                                Point point = new Point();

                                if (drawState == KinectDraw.Camera)
                                {
                                    // Skeleton-to-Color mapping
                                    ColorImagePoint colorPoint = kinectSensor.CoordinateMapper.MapSkeletonPointToColorPoint(skeletonPoint, ColorImageFormat.RgbResolution640x480Fps30);

                                    point.X = colorPoint.X;
                                    point.Y = colorPoint.Y;
                                }

                                if (drawState == KinectDraw.Depth) // Remember to change the Image and Canvas size to 320x240.
                                {
                                    // Skeleton-to-Depth mapping
                                    DepthImagePoint depthPoint = kinectSensor.CoordinateMapper.MapSkeletonPointToDepthPoint(skeletonPoint, DepthImageFormat.Resolution640x480Fps30);

                                    point.X = depthPoint.X;
                                    point.Y = depthPoint.Y;
                                }

                                // Color-code the joint by which tracked skeleton owns it.
                                Color colorToAdd;

                                if (trackedSkeletonOne != null)
                                {
                                    if (body.TrackingId == trackedSkeletonOne.TrackingId)
                                    {
                                        colorToAdd = Color.Red;
                                    }
                                    // FIX: trackedSkeletonTwo was dereferenced without a
                                    // null check, throwing when only one player was tracked.
                                    else if (trackedSkeletonTwo != null && body.TrackingId == trackedSkeletonTwo.TrackingId)
                                    {
                                        colorToAdd = Color.Blue;
                                    }
                                    else
                                    {
                                        //something obnoxious
                                        colorToAdd = Color.HotPink;
                                    }
                                }
                                else
                                {
                                    //something obnoxious
                                    colorToAdd = Color.HotPink;
                                }

                                skeletonPoints.Add(new ColoredPoint(point, colorToAdd));
                            }
                        }
                    }
                }
            }
        }
Beispiel #25
0
        /// <summary>
        /// Handles a synchronized set of Kinect frames: copies the video image into
        /// an XNA texture, finds the first tracked skeleton, and builds a background
        /// mask texture that is transparent wherever the active player appears.
        /// NOTE(review): an early return on any null frame also skips all later
        /// regions for that tick, so the skeleton/mask simply keep their old state.
        /// </summary>
        void myKinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
#if SREENSHOT_PAUSE
            // Stop processing new frames once the configured track limit is reached.
            if (trackCount == trackLimit)
            {
                return;
            }
#endif

            #region Video image

            // Puts a copy of the video image into the kinect video texture

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                // Lazily allocate the pixel buffer: 4 bytes per pixel.
                if (colorData == null)
                {
                    colorData = new byte[colorFrame.Width * colorFrame.Height * 4];
                }

                colorFrame.CopyPixelDataTo(colorData);

                kinectVideoTexture = new Texture2D(GraphicsDevice, colorFrame.Width, colorFrame.Height);

                Color[] bitmap = new Color[colorFrame.Width * colorFrame.Height];

                int sourceOffset = 0;

                // Reorder the source bytes (B,G,R order in colorData) into the XNA
                // Color(r, g, b, a) layout, forcing alpha to fully opaque.
                for (int i = 0; i < bitmap.Length; i++)
                {
                    bitmap[i] = new Color(colorData[sourceOffset + 2],
                                          colorData[sourceOffset + 1], colorData[sourceOffset], 255);
                    sourceOffset += 4;
                }

                kinectVideoTexture.SetData(bitmap);
            }

            #endregion

            #region Skeleton

            // Finds the currently active skeleton

            using (SkeletonFrame frame = e.OpenSkeletonFrame())
            {
                if (frame == null)
                {
                    return;
                }
                else
                {
                    skeletons = new Skeleton[frame.SkeletonArrayLength];
                    frame.CopySkeletonDataTo(skeletons);
                }
            }

            // 0 means "no tracked skeleton"; the value compared against the depth
            // player bits below is the 1-based slot index of the first tracked one.
            activeSkeletonNumber = 0;

            for (int i = 0; i < skeletons.Length; i++)
            {
                if (skeletons[i].TrackingState == SkeletonTrackingState.Tracked)
                {
                    activeSkeletonNumber = i + 1;
                    activeSkeleton       = skeletons[i];
                    break;
                }
            }

            #endregion

            #region Depth image

            // Creates a game background image with transparent regions
            // where the player is displayed

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                // Get the depth data

                if (depthFrame == null)
                {
                    return;
                }

                if (depthData == null)
                {
                    depthData = new short[depthFrame.Width * depthFrame.Height];
                }

                depthFrame.CopyPixelDataTo(depthData);

                // Create the mask from the background image

                gameImageTexture.GetData(maskImageColors);

                if (activeSkeletonNumber != 0)
                {
                    for (int depthPos = 0; depthPos < depthData.Length; depthPos++)
                    {
                        // find a player to mask - split off bottom bits
                        int playerNo = depthData[depthPos] & 0x07;

                        if (playerNo == activeSkeletonNumber)
                        {
                            // We have a player to mask

                            // find the X and Y positions of the depth point
                            int x = depthPos % depthFrame.Width;
                            int y = depthPos / depthFrame.Width;

                            // get the X and Y positions in the video feed
                            ColorImagePoint playerPoint = myKinect.MapDepthToColorImagePoint(
                                DepthImageFormat.Resolution320x240Fps30, x, y, depthData[depthPos], ColorImageFormat.RgbResolution640x480Fps30);

                            // Map the player coordinates into our lower resolution background
                            // Have to do this because the lowest resolution for the color camera is 640x480

                            playerPoint.X /= 2;
                            playerPoint.Y /= 2;

                            // convert this into an offset into the mask color data
                            // NOTE(review): this indexes the mask with depthFrame.Width,
                            // which assumes the mask texture has the depth resolution —
                            // confirm gameImageTexture/maskImageColors are 320x240.
                            int gameImagePos = (playerPoint.X + (playerPoint.Y * depthFrame.Width));
                            if (gameImagePos < maskImageColors.Length)
                            {
                                // make this point in the mask transparent
                                maskImageColors[gameImagePos] = Color.FromNonPremultiplied(0, 0, 0, 0);
                            }
                        }
                    }
                }

                gameMaskTexture = new Texture2D(GraphicsDevice, depthFrame.Width, depthFrame.Height);
                gameMaskTexture.SetData(maskImageColors);
            }

            #endregion
        }
        /// <summary>
        /// Decodes the buffered skeleton data for the current frame set: determines
        /// the active color/depth image format and dimensions, maps every joint of
        /// every buffered skeleton into render coordinates, and draws the result
        /// onto the play field.
        /// NOTE(review): when _imageType is Depth, colorFormat/colorWidth/colorHeight
        /// keep their zero defaults but are still used in the joint mapping and
        /// scale-factor computation below — verify this path is intentional.
        /// </summary>
        /// <param name="e">Frame-set event args used to read the current image format.</param>
        /// <param name="sensor">The sensor providing the coordinate mapper and stream formats.</param>
        private void DecodeSkeletonData(AllFramesReadyEventArgs e, KinectSensor sensor)
        {
            #region GetImageFormat

            ColorImageFormat colorFormat = ColorImageFormat.Undefined;
            int colorWidth  = 0;
            int colorHeight = 0;

            DepthImageFormat depthFormat = DepthImageFormat.Undefined;
            int depthWidth  = 0;
            int depthHeight = 0;

            switch (this._imageType)
            {
            case ImageType.Color:
                // Retrieve the current color format, from the frame if present, and from the sensor if not.
                using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
                {
                    if (null != colorImageFrame)
                    {
                        colorFormat = colorImageFrame.Format;
                        colorWidth  = colorImageFrame.Width;
                        colorHeight = colorImageFrame.Height;
                    }
                    else if (null != sensor.ColorStream)
                    {
                        colorFormat = sensor.ColorStream.Format;
                        colorWidth  = sensor.ColorStream.FrameWidth;
                        colorHeight = sensor.ColorStream.FrameHeight;
                    }
                }

                break;

            case ImageType.Depth:
                // Retrieve the current depth format, from the frame if present, and from the sensor if not.
                using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
                {
                    if (null != depthImageFrame)
                    {
                        depthFormat = depthImageFrame.Format;
                        depthWidth  = depthImageFrame.Width;
                        depthHeight = depthImageFrame.Height;
                    }
                    else if (null != sensor.DepthStream)
                    {
                        depthFormat = sensor.DepthStream.Format;
                        depthWidth  = sensor.DepthStream.FrameWidth;
                        depthHeight = sensor.DepthStream.FrameHeight;
                    }
                }

                break;
            }

            #endregion

            // Clear the play canvas
            this.playField.Children.Clear();

            // Check every skeleton
            for (int skeletonSlot = 0; skeletonSlot < this._skeletonBuffer.Length; skeletonSlot++)
            {
                var skeleton = this._skeletonBuffer[skeletonSlot];

                #region Skeleton Position

                // Map points between skeleton and color/depth
                var jointMapping = this._jointMappings[skeletonSlot];
                jointMapping.Clear();

                try
                {
                    // Transform the data into the correct space
                    // For each joint, we determine the exact X/Y coordinates for the target view
                    foreach (Joint joint in skeleton.Joints)
                    {
                        ColorImagePoint colorPoint = sensor.CoordinateMapper.MapSkeletonPointToColorPoint(joint.Position, colorFormat);

                        // Scale the color-space coordinate onto the render surface.
                        Point mappedPoint = new Point(
                            (int)(this._renderSize.Width * colorPoint.X / colorWidth),
                            (int)(this._renderSize.Height * colorPoint.Y / colorHeight));

                        jointMapping[joint.JointType] = new JointMapping
                        {
                            Joint       = joint,
                            MappedPoint = mappedPoint,
                            OriginPoint = colorPoint,
                        };
                    }
                }
                catch (UnauthorizedAccessException)
                {
                    // Kinect is no longer available.
                    return;
                }

                // Look up the center point
                Point centerPoint = PositionCalculator.Get2DPosition(
                    sensor,
                    this._imageType,
                    this._renderSize,
                    skeleton.Position,
                    colorFormat,
                    colorWidth,
                    colorHeight,
                    depthFormat,
                    depthWidth,
                    depthHeight);

                #endregion

                // Scale the skeleton thickness
                // 1.0 is the desired size at 640 width
                this._scaleFactor = this._renderSize.Width / colorWidth;

                // Displays a gradient near the edge of the display
                // where the skeleton is leaving the screen
                this.DrawClippedEdges(skeleton);

                switch (skeleton.TrackingState)
                {
                case SkeletonTrackingState.PositionOnly:
                {
                    // The skeleton is being tracked, but we only know the general position, and
                    // we do not know the specific joint locations.
                    this.DrawBodyCenter(centerPoint);
                }
                break;

                case SkeletonTrackingState.Tracked:
                {
                    // The skeleton is being tracked and the joint data is available for consumption.
                    this.DrawBody(skeleton, jointMapping);

                    // Track player
                    this.TrackPlayer(skeleton, jointMapping);
                }
                break;
                }
            }
        }
Beispiel #27
0
        /// <summary>
        /// Per-frame Kinect handler for head-controlled mouse input: tracks the
        /// face of the first tracked skeleton, extracts both eye regions for blink
        /// detection, smooths the head rotation with a Gaussian filter, and
        /// translates the result into mouse movement, mouth-driven click/drag and
        /// brow-driven scrolling. Progress is appended to mouseLog.txt.
        /// NOTE(review): the face-tracking results (feature points, animation
        /// units) are read below even when TrackSuccessful is false — confirm that
        /// this is intended.
        /// </summary>
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            File.AppendAllText("mouseLog.txt", DateTime.Now + " - All Kinect frames ready.\n");
            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                // All three frames are required for face tracking; abort otherwise.
                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Color- depth or Skeletonframe is null. Aborting Frame.\n");
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }


                // Get the skeleton information
                if (this.SkeletonData == null || this.SkeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.SkeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.SkeletonData);
                // Use the first skeleton that is fully tracked, if any.
                Skeleton activeSkeleton = null;
                activeSkeleton = (from skel in this.SkeletonData where skel.TrackingState == SkeletonTrackingState.Tracked select skel).FirstOrDefault();

                //Idea: Separate Eye-Parts of Color Image
                //Use learning Algorithm for right and left eye
                //Detect blink on separated parts of color Image

                //colorImage is one dimensional array with 640 x 480 x 4 (RGBA) values

                if (activeSkeleton != null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Skeleton is there. Trying to find face.\n");
                    FaceTrackFrame currentFaceFrame = faceTracker.Track(ColorImageFormat.RgbResolution640x480Fps30, colorImage, depthImageFormat, depthImage, activeSkeleton);
                    if (currentFaceFrame.TrackSuccessful)
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Recognized face successfully.\n");
                    }
                    else
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Couldn't find face in frame.\n");
                    }

                    //Get relevant Points for blink detection
                    //Left eye
                    // Bounding box spanned by the upper-quarter and lower-three-quarter
                    // eyelid feature points of the projected 3D face shape.
                    int    minX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].X);
                    int    minY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].Y);
                    int    maxX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].X);
                    int    maxY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].Y);
                    Bitmap leftEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, false);
                    //this.pbRight.BeginInvoke((MethodInvoker)(() => this.pbRight.Image = leftEye));
                    //

                    //Right eye
                    minX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].X);
                    minY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].Y);
                    maxX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].X);
                    maxY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].Y);

                    Bitmap rightEye  = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, true);
                    // Copies for the UI thread; the originals are used for analysis below.
                    Bitmap leftEye2  = null;
                    Bitmap rightEye2 = null;
                    if (leftEye != null)
                    {
                        leftEye2 = new Bitmap(leftEye);
                    }
                    if (rightEye != null)
                    {
                        rightEye2 = new Bitmap(rightEye);
                    }
                    // System.Delegate d = new MethodInvoker(SetPictures));
                    //   this.Invoke(SetPictures, leftEye);
                    //pbRight.Image = rightEye;
                    // NOTE(review): the right eye is shown in pbLeft and vice versa —
                    // possibly a mirror-view convention; confirm it is intentional.
                    this.pbLeft.BeginInvoke((MethodInvoker)(() => this.pbLeft.Image = rightEye2));
                    this.pbLeft.BeginInvoke((MethodInvoker)(() => this.pbRight.Image = leftEye2));
                    // this.Invoke(new MethodInvoker(SetPictures));
                    //Apply the edge filter to both eyes.

                    if (rightEye != null && leftEye != null)
                    {
                        // Convolve the greyscale eye images and classify each eye as
                        // open/closed from the resulting edge-angle histogram.
                        Dictionary <string, int> angleCount;
                        Bitmap edgePicRight   = Convolution(ConvertGrey(rightEye), true, out angleCount);
                        bool   rightEyeClosed = IsEyeClosed(angleCount);
                        Bitmap edgePicLeft    = Convolution(ConvertGrey(leftEye), false, out angleCount);
                        bool   leftEyeClosed  = IsEyeClosed(angleCount);
                        //   pbLeftFaltung.Image = edgePicLeft;
                        //   pbRightFaltung.Image = edgePicRight;



                        // Keep a bounded history (last 100 frames) of eye states.
                        if (rightEyeClosedHistory.Count > 100)
                        {
                            rightEyeClosedHistory.RemoveAt(0);
                        }
                        if (leftEyeClosedHistory.Count > 100)
                        {
                            leftEyeClosedHistory.RemoveAt(0);
                        }
                        leftEyeClosedHistory.Add(leftEyeClosed);
                        rightEyeClosedHistory.Add(rightEyeClosed);

                        //If Face is rotated, move Mouse
                        if (headRotationHistory.Count > gaussFilter.Count - 1 && leftEyeClosedHistory.Count > nudConvolutionFilterLength.Value && currentFaceFrame.TrackSuccessful)
                        {
                            int   x = 0;
                            int   y = 0;
                            // Animation units: brow raise/lower and jaw (mouth) opening.
                            float browRaiserValue  = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowRaiser];
                            float browLowererValue = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowLower];
                            float mouthOpenValue   = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.JawLower];
                            if (browRaiserHistory.Count >= 100)
                            {
                                browRaiserHistory.RemoveAt(0);
                                browLowererHistory.RemoveAt(0);
                                mouthOpenHistory.RemoveAt(0);
                            }
                            browLowererHistory.Add(browLowererValue);
                            browRaiserHistory.Add(browRaiserValue);
                            mouthOpenHistory.Add(mouthOpenValue);

                            //Method 1: no smoothing
                            //ScaleXY(currentFaceFrame.Rotation, out x, out y);
                            //MouseControl.Move(x, y);

                            ////Method 2: smoothing over the last x frames:
                            //int i = 0;
                            //Vector3DF rotationMedium = new Vector3DF();
                            //while (i < 10 && headRotationHistory.Count - 1 > i)
                            //{
                            //    i++;
                            //    rotationMedium.X += headRotationHistory[headRotationHistory.Count - 1 - i].X;
                            //    rotationMedium.Y += headRotationHistory[headRotationHistory.Count - 1 - i].Y;
                            //}
                            //rotationMedium.X = rotationMedium.X / i;
                            //rotationMedium.Y = rotationMedium.Y / i;
                            //ScaleXY(rotationMedium, out x, out y);
                            //MouseControl.Move(x, y);

                            //Method 3: Gaussian filter: weight the most recent frames more strongly.
                            Vector3DF rotationMedium = new Vector3DF();
                            rotationMedium.X = currentFaceFrame.Rotation.X * gaussFilter[0];
                            rotationMedium.Y = currentFaceFrame.Rotation.Y * gaussFilter[0];
                            int i = 0;
                            while (i < gaussFilter.Count - 1)
                            {
                                rotationMedium.X += (headRotationHistory[headRotationHistory.Count - 1 - i].X * gaussFilter[i]);
                                rotationMedium.Y += (headRotationHistory[headRotationHistory.Count - 1 - i].Y * gaussFilter[i]);
                                i++;
                            }
                            rotationMedium.X = (float)(rotationMedium.X / gaussFactor);
                            rotationMedium.Y = (float)(rotationMedium.Y / gaussFactor);
                            ScaleXY(rotationMedium, out x, out y);


                            //Method 4: quadratic smoothing
                            //double deltaX = ((-currentFaceFrame.Rotation.Y) - (-headRotationHistory.Last().Y));
                            //double deltaY = ((-currentFaceFrame.Rotation.X) - (-headRotationHistory.Last().X));
                            //if (deltaX < 0)
                            //    deltaX = -Math.Pow(deltaX, 2) * 4;
                            //else
                            //    deltaX = Math.Pow(deltaX, 2) * 4;
                            //if (deltaY < 0)
                            //    deltaY = -Math.Pow(deltaY, 2) * 5;
                            //else
                            //    deltaY = Math.Pow(deltaY, 2) * 5;
                            //MouseControl.DeltaMove((int)Math.Round(deltaX, 0), (int)Math.Round(deltaY));


                            //Check for right, left or Double Click
                            //1. Check if there was already a click 20 Frames ago, or if Drag & Drop is active
                            if (clickDelay > nudClickDelay.Value && !pointNClickActive)
                            {
                                //2. If not, calculate mean values of dy's last 16 Frames
                                if (CalculateMeanConvolutionValues())
                                {
                                    clickDelay = 0;
                                }
                                else
                                {
                                    //Else check for open Mouth
                                    // A sustained open mouth (current frame above the start
                                    // threshold, three prior frames above the confirmation
                                    // threshold) starts a left-button drag at the mouse
                                    // position from four frames ago.
                                    if (mouthOpenValue > (float)nudMouthOpenStartThreshold.Value && mouthOpenHistory[mouthOpenHistory.Count - 2] > (float)nudMouthOpenConfirmation.Value && mouthOpenHistory[mouthOpenHistory.Count - 3] > (float)nudMouthOpenConfirmation.Value && mouthOpenHistory[mouthOpenHistory.Count - 4] > (float)nudMouthOpenConfirmation.Value)
                                    {
                                        MouseControl.Move(mousePositionHistory[mousePositionHistory.Count - 4].X, mousePositionHistory[mousePositionHistory.Count - 4].Y);
                                        this.lbAction.Invoke((MethodInvoker)(() => this.lbAction.Items.Add("Left Mouse Down on X: " + mousePositionHistory[mousePositionHistory.Count - 4].X + " Y: " + mousePositionHistory[mousePositionHistory.Count - 4].Y)));
                                        //lbAction.Items.Add("Left Mouse Down on X: " + mousePositionHistory[mousePositionHistory.Count - 4].X + " Y: " + mousePositionHistory[mousePositionHistory.Count - 4].Y);
                                        MouseControl.MouseDownLeft();
                                        pointNClickActive = true;
                                        clickDelay        = 0;
                                    }
                                }
                            }
                            else if (pointNClickActive)
                            {
                                // Closing the mouth below the end threshold releases the drag.
                                if (mouthOpenValue < (float)nudMouthOpenEndThreshold.Value)
                                {
                                    this.lbAction.Invoke((MethodInvoker)(() => this.lbAction.Items.Add("Left Mouse Up on X: " + x + " Y: " + y)));
                                    MouseControl.MouseUpLeft();
                                    pointNClickActive = false;
                                    clickDelay        = 0;
                                }
                            }
                            MouseControl.Move(x, y);
                            // Brow lowering scrolls down, brow raising scrolls up
                            // (negative argument), each scaled by its UI multiplier.
                            if (browLowererValue > (float)nudBrowLowererStartThreshold.Value)
                            {
                                MouseControl.ScrollDown((int)(-browLowererValue * (int)nudScrollMultiplierDown.Value));
                            }
                            if (browRaiserValue > (float)nudBrowRaiserStartThreshold.Value)
                            {
                                MouseControl.ScrollDown((int)(browRaiserValue * (int)nudScrollMultiplierUp.Value));
                            }
                            if (mousePositionHistory.Count > 100)
                            {
                                mousePositionHistory.RemoveAt(0);
                            }
                            mousePositionHistory.Add(new Microsoft.Kinect.Toolkit.FaceTracking.Point(x, y));
                            File.AppendAllText("mouseLog.txt", DateTime.Now + " - Face and eyes successfully tracked.\n");
                        }
                    }
                    else
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Face recognized but couldn't find eye in face.\n");
                    }
                    clickDelay++;

                    // Record the current head rotation, bounded to ~100 entries.
                    headRotationHistory.Add(currentFaceFrame.Rotation);
                    if (headRotationHistory.Count >= 100)
                    {
                        headRotationHistory.RemoveAt(0);
                    }
                }
                else
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Active Skeleton is null. Couldn't analyze frame.\n");
                }
            }
            // Broad catch: any failure during frame analysis is logged and the
            // handler continues with the next frame.
            catch (Exception e)
            {
                File.AppendAllText("mouseLog.txt", DateTime.Now + " - Error during frame analyzation.\n" + e.ToString());
            }
            finally
            {
                // Frames are opened outside a using-block, so dispose them here.
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
        /// <summary>
        /// Per-frame handler: slices the depth image around the tracked right hand,
        /// runs AForge image processing on the slice, and displays the result.
        /// </summary>
        /// <param name="sender">The KinectSensor raising the event.</param>
        /// <param name="e">Event args used to open the depth frame.</param>
        void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                // Default depth window in millimeters, used when no hand is tracked.
                int minDepth = 850;
                int maxDepth = 4000;

                // NOTE(review): outBmp (and, in the tracked branch, depthBmp and the
                // bitmap returned by ProcessFrame) are GDI+ bitmaps that are never
                // disposed here; whether ProcessFrame keeps a reference is not visible
                // from this file — TODO confirm ownership before adding Dispose calls.
                System.Drawing.Bitmap outBmp = new System.Drawing.Bitmap(160, 160);
                BitmapSource          depthBitmapSource;
                BitmapSource          processedBitmapSource;

                //Get the position of interest on the depthmap from skeletal tracking
                DepthImagePoint rightHandPoint = jointTracker.GetJointPosition(kinectSensorChooser.Kinect, e, JointType.HandRight);

                if (jointTracker.JointDetected)
                {
                    textResult.Text = "Right hand is being tracked";

                    int rightHandDepth = rightHandPoint.Depth;
                    if (rightHandDepth < 850)
                    {
                        // Hand is closer than the sensor's reliable near range: clamp the slice.
                        minDepth = 850;
                        maxDepth = 1500;
                    }
                    else
                    {
                        // Center a 150 mm slice on the hand.
                        minDepth = rightHandDepth - 75;
                        maxDepth = rightHandDepth + 75;
                    }

                    depthBitmapSource = sliceDepthImage(depthFrame, minDepth, maxDepth);

                    //Create a bitmap from the depth information
                    System.Drawing.Bitmap depthBmp = depthBitmapSource.ToBitmap();

                    //Aforge performs image processing here.
                    outBmp = imageProcessor.ProcessFrame(depthBmp, rightHandPoint.X, rightHandPoint.Y);
                }
                else
                {
                    textResult.Text = "No hand detected";

                    //depthBitmapSource = sliceDepthImage(depthFrame, 850, 1500);

                    // No hand: blank the output. FIX: the Graphics object is IDisposable
                    // and was previously leaked on every untracked frame.
                    using (System.Drawing.Graphics g = System.Drawing.Graphics.FromImage(outBmp))
                    {
                        g.Clear(System.Drawing.Color.Black);
                    }
                }

                //Create a bitmapsource to show the processed image
                processedBitmapSource = outBmp.ToBitmapSource();

                //Display the images
                procImageDisplay.Source = processedBitmapSource;
            }
        }
Beispiel #29
0
        /// <summary>
        /// Throttled multi-stream handler: copies depth, color, skeleton, and joint
        /// data into shared buffers, each at its own configured sub-rate.
        /// </summary>
        /// <param name="sender">The KinectSensor raising the event.</param>
        /// <param name="allFramesReadyEventArgs">Event args used to open all three frames.</param>
        private void KinectSensorOnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            // Global throttle: only every FPSGlobal-th event is processed at all.
            if (++fpsGlobal < FPSGlobal)
            {
                return;
            }
            fpsGlobal = 0;

            using (var depth = allFramesReadyEventArgs.OpenDepthImageFrame())
            using (var color = allFramesReadyEventArgs.OpenColorImageFrame())
            using (var bodies = allFramesReadyEventArgs.OpenSkeletonFrame())
            {
                // Every stream must have delivered a frame for this event.
                if (depth == null || color == null || bodies == null)
                {
                    return;
                }

                AllFrameWatch.Again();

                // One-time initialization from the first complete frame set.
                InitFrames(depth, color, bodies);
                if (!init)
                {
                    return;
                }

                // Back up the raw depth pixels (used by the motion check).
                depth.CopyPixelDataTo(Depth.Pixelss);

                // Nothing more to do while there is no task or it is standing by.
                if (Task == null || Task.StandBy)
                {
                    AllFrameWatch.Stop();
                    return;
                }

                // Stamp the computed depth data at its own reduced rate.
                if (++fpsDepth >= FPSDepth)
                {
                    fpsDepth = 0;
                    // depthFrame.CopyDepthImagePixelDataTo(this.DepthPixels);
                    Depth.Stamp.Time = System.DateTime.Now;
                }

                // Copy the color data at its own reduced rate.
                if (++fpsColor >= FPSColor)
                {
                    fpsColor = 0;
                    color.CopyPixelDataTo(Color.Pixels);
                    Color.Stamp.Time = System.DateTime.Now;
                }

                // Copy the skeleton data at its own reduced rate.
                if (++fpsSkeleton >= FPSSkeleton)
                {
                    fpsSkeleton = 0;
                    bodies.CopySkeletonDataTo((Skeleton[])Skeletons.RawData);
                    Skeletons.Stamp.Time = System.DateTime.Now;
                }

                // Project the first skeleton's 3D joints to 2D at the joint rate.
                if (++fpsJoints >= FPSJoints)
                {
                    fpsJoints = 0;
                    RefreshBodyData(Skeletons);
                }

                AllFrameWatch.Stop();
            }
        }
Beispiel #30
0
        /// <summary>
        /// Per-frame handler: maps each tracked skeleton's joints into image1
        /// coordinates and updates the five stick-figure polylines in m_poly.
        /// </summary>
        /// <param name="sender">The KinectSensor raising the event.</param>
        /// <param name="e">Event args used to open the skeleton and depth frames.</param>
        void nui_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // FIX: the skeleton frame was opened but never disposed, leaking a
            // frame buffer on every event. Wrap it in a using block.
            using (SkeletonFrame sf = e.OpenSkeletonFrame())
            {
                if (sf == null)
                {
                    return;
                }
                Skeleton[] skeletonData = new Skeleton[sf.SkeletonArrayLength];
                sf.CopySkeletonDataTo(skeletonData);
                using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
                {
                    if (depthImageFrame == null)
                    {
                        return;
                    }
                    foreach (Skeleton sd in skeletonData)
                    {
                        if (sd.TrackingState != SkeletonTrackingState.Tracked)
                        {
                            continue;
                        }

                        // Map the first 20 joint types from skeleton space to
                        // depth space, then scale into image1 pixel coordinates.
                        const int nMax = 20;
                        Point[] points = new Point[nMax];
                        for (int j = 0; j < nMax; j++)
                        {
                            DepthImagePoint depthPoint =
                                depthImageFrame.MapFromSkeletonPoint(sd.Joints[(JointType)j].Position);
                            points[j] = new Point((int)(image1.Width *
                                                        depthPoint.X / depthImageFrame.Width),
                                                  (int)(image1.Height *
                                                        depthPoint.Y / depthImageFrame.Height));
                        }

                        // Line from hip to head (spine).
                        SetLimbPolyline(0, points,
                            JointType.HipCenter, JointType.Spine, JointType.ShoulderCenter, JointType.Head);
                        // Line from left hand to shoulder center.
                        SetLimbPolyline(1, points,
                            JointType.ShoulderCenter, JointType.ShoulderLeft, JointType.ElbowLeft,
                            JointType.WristLeft, JointType.HandLeft);
                        // Line from right hand to shoulder center.
                        SetLimbPolyline(2, points,
                            JointType.ShoulderCenter, JointType.ShoulderRight, JointType.ElbowRight,
                            JointType.WristRight, JointType.HandRight);
                        // Line from left foot to hip.
                        SetLimbPolyline(3, points,
                            JointType.HipCenter, JointType.HipLeft, JointType.KneeLeft,
                            JointType.AnkleLeft, JointType.FootLeft);
                        // Line from right foot to hip.
                        SetLimbPolyline(4, points,
                            JointType.HipCenter, JointType.HipRight, JointType.KneeRight,
                            JointType.AnkleRight, JointType.FootRight);
                    }
                }
            }
        }

        /// <summary>
        /// Assigns the given joint chain as the points of m_poly[index] and makes
        /// the polyline visible. Helper extracted from nui_AllFramesReady.
        /// </summary>
        private void SetLimbPolyline(int index, Point[] points, params JointType[] jointTypes)
        {
            PointCollection pc = new PointCollection(jointTypes.Length);
            foreach (JointType jt in jointTypes)
            {
                pc.Add(points[(int)jt]);
            }
            m_poly[index].Points     = pc;
            m_poly[index].Visibility = Visibility.Visible;
        }
        /// <summary>
        /// Skeleton-viewer frame handler: snapshots the skeleton frame, caches the
        /// head/hand positions of every skeleton, maps all joints into 2D view
        /// coordinates, and refreshes one KinectSkeleton canvas per skeleton.
        /// </summary>
        /// <param name="sender">The KinectSensor raising the event.</param>
        /// <param name="e">Event args used to open the skeleton and depth frames.</param>
        private void KinectAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Have we already been "shut down" by the user of this viewer,
            // or has the SkeletonStream been disabled since this event was posted?
            if ((this.Kinect == null) || !((KinectSensor)sender).SkeletonStream.IsEnabled)
            {
                return;
            }

            bool haveSkeletonData = false;

            // Snapshot the skeleton data into this.skeletonData, disposing the
            // frame as soon as the copy is complete.
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    // Lazily create the per-skeleton canvases on first use.
                    if (this.skeletonCanvases == null)
                    {
                        this.CreateListOfSkeletonCanvases();
                    }

                    // (Re)allocate the buffer if the reported array length changed.
                    if ((this.skeletonData == null) || (this.skeletonData.Length != skeletonFrame.SkeletonArrayLength))
                    {
                        this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    }

                    skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                    haveSkeletonData = true;
                }
            }

            if (haveSkeletonData)
            {
                using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
                {
                    if (depthImageFrame != null)
                    {
                        // NOTE(review): despite its name, this counts EVERY skeleton in
                        // the array (tracked or not) and is used as the index into both
                        // jointMappings and skeletonCanvases — both must be at least as
                        // long as skeletonData. TODO confirm those sizes at creation.
                        int trackedSkeletons = 0;

                        foreach (Skeleton skeleton in this.skeletonData)
                        {
                            Dictionary <JointType, JointMapping> jointMapping = this.jointMappings[trackedSkeletons];
                            jointMapping.Clear();

                            KinectSkeleton skeletonCanvas = this.skeletonCanvases[trackedSkeletons++];
                            skeletonCanvas.ShowBones  = this.ShowBones;
                            skeletonCanvas.ShowJoints = this.ShowJoints;
                            skeletonCanvas.ShowCenter = this.ShowCenter;

                            // (original marker comment: "Burası" — Turkish for "here")

                            // Transform the data into the correct space
                            // For each joint, we determine the exact X/Y coordinates for the target view
                            foreach (Joint joint in skeleton.Joints)
                            {
                                //TODO:
                                // Cache the 3D head position for consumers of this viewer.
                                if (joint.JointType == JointType.Head)
                                {
                                    HeadX = joint.Position.X;
                                    HeadY = joint.Position.Y;
                                    HeadZ = joint.Position.Z;
                                }

                                // Cache the right-hand position; the product check skips
                                // the case where either coordinate is exactly zero.
                                if (joint.JointType == JointType.HandRight)
                                {
                                    if (joint.Position.X * joint.Position.Y != 0)
                                    {
                                        RightHandX = joint.Position.X;
                                        RightHandY = joint.Position.Y;
                                    }
                                }

                                // Same caching rule for the left hand.
                                if (joint.JointType == JointType.HandLeft)
                                {
                                    if (joint.Position.X * joint.Position.Y != 0)
                                    {
                                        LeftHandX = joint.Position.X;
                                        LeftHandY = joint.Position.Y;
                                    }
                                }

                                // Project the 3D joint into 2D view coordinates.
                                Point mappedPoint = this.GetPosition2DLocation(depthImageFrame, joint.Position);
                                jointMapping[joint.JointType] = new JointMapping
                                {
                                    Joint       = joint,
                                    MappedPoint = mappedPoint
                                };
                            }

                            // Look up the center point
                            Point centerPoint = this.GetPosition2DLocation(depthImageFrame, skeleton.Position);

                            // Scale the skeleton thickness
                            // 1.0 is the desired size at 640 width
                            double scale = this.RenderSize.Width / 640;

                            skeletonCanvas.RefreshSkeleton(skeleton, jointMapping, centerPoint, scale);
                        }

                        //TODO:
                        FrameReloaded();

                        if (ImageType == ImageType.Depth)
                        {
                            this.ChooseTrackedSkeletons(this.skeletonData);
                        }
                    }
                }
            }
        }
Beispiel #32
0
        /// <summary>
        /// Per-frame handler: shows the color or depth stream (depending on the
        /// active camera mode) and draws a circle on every joint of each tracked
        /// skeleton, mapped into the active stream's coordinate space.
        /// </summary>
        /// <param name="sender">The KinectSensor raising the event.</param>
        /// <param name="e">Event args used to open the color, depth, and skeleton frames.</param>
        void Sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Color stream: displayed only while in color mode.
            using (var colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null && _mode == CameraMode.Color)
                {
                    camera.Source = colorFrame.ToBitmap();
                }
            }

            // Depth stream: displayed only while in depth mode.
            using (var depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null && _mode == CameraMode.Depth)
                {
                    camera.Source = depthFrame.ToBitmap();
                }
            }

            // Skeleton stream: redraw all joint markers for tracked bodies.
            using (var skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }

                canvas.Children.Clear();
                skeletonFrame.CopySkeletonDataTo(_bodies);

                foreach (var skeleton in _bodies)
                {
                    if (skeleton.TrackingState != SkeletonTrackingState.Tracked)
                    {
                        continue;
                    }

                    // COORDINATE MAPPING
                    foreach (Joint joint in skeleton.Joints)
                    {
                        // 3D position in meters.
                        SkeletonPoint position = joint.Position;

                        // Projected 2D position in pixels, depending on the mode.
                        var point = new Point();

                        if (_mode == CameraMode.Color)
                        {
                            // Skeleton-to-Color mapping.
                            ColorImagePoint mapped = _sensor.CoordinateMapper.MapSkeletonPointToColorPoint(position, ColorImageFormat.RgbResolution640x480Fps30);
                            point.X = mapped.X;
                            point.Y = mapped.Y;
                        }
                        else if (_mode == CameraMode.Depth) // Remember to change the Image and Canvas size to 320x240.
                        {
                            // Skeleton-to-Depth mapping.
                            DepthImagePoint mapped = _sensor.CoordinateMapper.MapSkeletonPointToDepthPoint(position, DepthImageFormat.Resolution320x240Fps30);
                            point.X = mapped.X;
                            point.Y = mapped.Y;
                        }

                        // Draw a light-gray circle centered on the joint.
                        var marker = new Ellipse
                        {
                            Fill   = Brushes.LightGray,
                            Width  = 20,
                            Height = 20
                        };
                        Canvas.SetLeft(marker, point.X - marker.Width / 2);
                        Canvas.SetTop(marker, point.Y - marker.Height / 2);
                        canvas.Children.Add(marker);
                    }
                }
            }
        }
        /// <summary>
        /// Measuring skeleton-viewer handler: snapshots skeleton data, buffers
        /// measurements of the first tracked skeleton while measuring is active,
        /// maps all joints to 2D, and updates the per-skeleton canvases.
        /// </summary>
        /// <param name="sender">The KinectSensor raising the event.</param>
        /// <param name="e">Event args used to open the skeleton/color/depth frames.</param>
        private void KinectAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            KinectSensor sensor = sender as KinectSensor;

            // Reset all canvases; they are repopulated below when data is available.
            foreach (var skeletonCanvas in this.skeletonCanvases)
            {
                skeletonCanvas.Skeleton = null;
            }

            // Have we already been "shut down" by the user of this viewer,
            // or has the SkeletonStream been disabled since this event was posted?
            if ((null == this.KinectSensorManager) ||
                (null == sensor) ||
                (null == sensor.SkeletonStream) ||
                !sensor.SkeletonStream.IsEnabled)
            {
                return;
            }

            bool haveSkeletonData = false;
            long frameTimeStamp   = -1;

            // Snapshot the skeleton data and frame timestamp, disposing the frame.
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    if ((this.skeletonData == null) || (this.skeletonData.Length != skeletonFrame.SkeletonArrayLength))
                    {
                        this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    }

                    skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                    frameTimeStamp = skeletonFrame.Timestamp;

                    haveSkeletonData = true;
                }
            }

            int trackedIndex = -1;

            // Find the first tracked skeleton and set trackedIndex accordingly.
            // FIX: guard against a null buffer — before the first skeleton frame
            // arrives, this.skeletonData is unallocated and the original loop threw
            // a NullReferenceException.
            if (this.skeletonData != null)
            {
                for (int i = 0; i < skeletonData.Length; i++)
                {
                    // '==' instead of Equals(): same result for enums, no boxing.
                    if (skeletonData[i].TrackingState == SkeletonTrackingState.Tracked)
                    {
                        trackedIndex = i;
                        break;
                    }
                }
            }

            bool isFullyTracked = false;

            if (isMeasuring && trackedIndex > -1)
            {
                // check to see if the skeleton @ trackedIndex is fully tracked
                if (fullyTrackedMapping == null && IsFullyTracked(skeletonData[trackedIndex]))
                {
                    isFullyTracked = true;
                }

                // Measure the tracked skeleton and buffer the results together with
                // a deep copy of the skeleton and its frame timestamp.
                SkeletonMeasurer measurer = new SkeletonMeasurer(skeletonData[trackedIndex]);
                measurer.determineMeasurements();
                AddMeasurementsToBuffer(measurer.TestMeasurements);

                skeletonBuffer.Add(ObjectCopier.Clone <Skeleton>(skeletonData[trackedIndex]));
                frameTimeStampBuffer.Add(frameTimeStamp);
            }

            if (haveSkeletonData)
            {
                ColorImageFormat colorFormat = ColorImageFormat.Undefined;
                int colorWidth  = 0;
                int colorHeight = 0;

                DepthImageFormat depthFormat = DepthImageFormat.Undefined;
                int depthWidth  = 0;
                int depthHeight = 0;

                switch (this.ImageType)
                {
                case ImageType.Color:
                    // Retrieve the current color format, from the frame if present, and from the sensor if not.
                    using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
                    {
                        if (null != colorImageFrame)
                        {
                            colorFormat = colorImageFrame.Format;
                            colorWidth  = colorImageFrame.Width;
                            colorHeight = colorImageFrame.Height;
                        }
                        else if (null != sensor.ColorStream)
                        {
                            colorFormat = sensor.ColorStream.Format;
                            colorWidth  = sensor.ColorStream.FrameWidth;
                            colorHeight = sensor.ColorStream.FrameHeight;
                        }
                    }

                    break;

                case ImageType.Depth:
                    // Retrieve the current depth format, from the frame if present, and from the sensor if not.
                    using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
                    {
                        if (null != depthImageFrame)
                        {
                            depthFormat = depthImageFrame.Format;
                            depthWidth  = depthImageFrame.Width;
                            depthHeight = depthImageFrame.Height;
                        }
                        else if (null != sensor.DepthStream)
                        {
                            depthFormat = sensor.DepthStream.Format;
                            depthWidth  = sensor.DepthStream.FrameWidth;
                            depthHeight = sensor.DepthStream.FrameHeight;
                        }
                    }

                    break;
                }

                for (int i = 0; i < this.skeletonData.Length && i < this.skeletonCanvases.Count; i++)
                {
                    var skeleton       = this.skeletonData[i];
                    var skeletonCanvas = this.skeletonCanvases[i];
                    var jointMapping   = this.jointMappings[i];

                    jointMapping.Clear();

                    try
                    {
                        // Transform the data into the correct space
                        // For each joint, we determine the exact X/Y coordinates for the target view
                        foreach (Joint joint in skeleton.Joints)
                        {
                            Point mappedPoint = Get2DPosition(
                                sensor,
                                this.ImageType,
                                this.RenderSize,
                                joint.Position,
                                colorFormat,
                                colorWidth,
                                colorHeight,
                                depthFormat,
                                depthWidth,
                                depthHeight);

                            jointMapping[joint.JointType] = new JointMapping
                            {
                                Joint       = joint,
                                MappedPoint = mappedPoint
                            };
                        }
                    }
                    catch (UnauthorizedAccessException)
                    {
                        // Kinect is no longer available.
                        return;
                    }

                    // Look up the center point
                    Point centerPoint = Get2DPosition(
                        sensor,
                        this.ImageType,
                        this.RenderSize,
                        skeleton.Position,
                        colorFormat,
                        colorWidth,
                        colorHeight,
                        depthFormat,
                        depthWidth,
                        depthHeight);

                    // Scale the skeleton thickness
                    // 1.0 is the desired size at 640 width
                    double scale = this.RenderSize.Width / 640;

                    skeletonCanvas.Skeleton      = skeleton;
                    skeletonCanvas.JointMappings = jointMapping;
                    skeletonCanvas.Center        = centerPoint;
                    skeletonCanvas.ScaleFactor   = scale;
                }

                // First fully-tracked sighting while measuring: take a deep copy of
                // the tracked skeleton's joint mapping as the reference mapping.
                if (isFullyTracked)
                {
                    fullyTrackedMapping = new Dictionary <JointType, JointMapping>();

                    foreach (JointType type in jointMappings[trackedIndex].Keys)
                    {
                        fullyTrackedMapping[type]             = new JointMapping();
                        fullyTrackedMapping[type].Joint       = jointMappings[trackedIndex][type].Joint;
                        fullyTrackedMapping[type].MappedPoint = jointMappings[trackedIndex][type].MappedPoint;
                    }
                }
            }
        }
Beispiel #34
0
        // Handle Privacy Screen
        private void KinectSensorOnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            var colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
            var depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
            //var skeltImageFrame = allFramesReadyEventArgs.OpenSkeletonFrame();
            try
            {
                using (var skeletonFrame = allFramesReadyEventArgs.OpenSkeletonFrame())
                {
                    if (null != skeletonFrame)
                    {
                        skeletonFrame.CopySkeletonDataTo(this.skeletons);
                        ChoosePrivacySkeleton();
                    }
                }
                {
                    if (colorImageFrame == null)
                    {
                        return;
                    }
                    if (depthImageFrame == null)
                    {
                        return;
                    }
                    // if (skeltImageFrame == null)
                    //    {
                    //      return;
                    //   }
              //      Console.WriteLine(s);
                    depthRange = 300;
                    if (s != null)
                    {
                        double min= 50;
                        double max = 0;
                        depthRange = 300;
                        foreach (Joint joint in s.Joints)
                        {
                            ColorImagePoint point = ks.CoordinateMapper.MapSkeletonPointToColorPoint(joint.Position, ColorImageFormat.RgbResolution640x480Fps30);
                            if (point.X > -1000000 && point.X < 1000000 && point.Y > -1000000 && point.Y < 1000000)
                            {
                                Joint j = joint;
                                double depth = joint.Position.Z;

                                if (depth < min)
                                {
                                    min = depth;
                                }
                                if (depth > max)
                                {
                                    max = depth;
                                }
                            }
                        }
                        currentDepth = min * 1000 - 800 - 100 - 50;
                        depthRange = (int)(max * 1000 - 800 - 100 - currentDepth);
                    /*    Joint jointHead = s.Joints[JointType.Head];
                        Joint jointCenter = s.Joints[JointType.ShoulderCenter];
                        ColorImagePoint pointHead = ks.CoordinateMapper.MapSkeletonPointToColorPoint(jointHead.Position, ColorImageFormat.RgbResolution640x480Fps30);
                        ColorImagePoint pointCenter = ks.CoordinateMapper.MapSkeletonPointToColorPoint(jointCenter.Position, ColorImageFormat.RgbResolution640x480Fps30);
                        Console.WriteLine(jointHead.Position.Z * 1000);
                        if (pointHead.X > -1000000 && pointHead.X < 1000000 && pointHead.Y > -1000000 && pointHead.Y < 1000000)
                        {
                            currentDepth = jointHead.Position.Z * 1000 - 700 - 100;
                            depthRange = 200;
                        }
                        else if (pointCenter.X > -1000000 && pointCenter.X < 1000000 && pointCenter.Y > -1000000 && pointCenter.Y < 1000000)
                        {
                            currentDepth = jointCenter.Position.Z * 1000 - 700 - 100;
                            depthRange = 200;
                        }*/
                    }

                    DepthImagePixel[] depthPixels = new DepthImagePixel[depthImageFrame.PixelDataLength];
                    depthImageFrame.CopyDepthImagePixelDataTo(depthPixels);
                    int minDepth = depthImageFrame.MinDepth;
                    int maxDepth = depthImageFrame.MaxDepth;
                    var ratio = colorImageFrame.PixelDataLength / depthImageFrame.PixelDataLength;
                    var heightC = colorImageFrame.Height;
                    var heightD = depthImageFrame.Height;
                    var LengthC = colorImageFrame.PixelDataLength;
                    var LengthD = depthPixels.Length;
                    var ratH = colorImageFrame.Height / depthImageFrame.Height;
                    var ratW = colorImageFrame.Width / depthImageFrame.Width;

                    // Make a copy of the color frame for displaying.
                    var haveNewFormat = this.currentColorImageFormat != colorImageFrame.Format;
                    if (haveNewFormat)
                    {

                        this.currentColorImageFormat = colorImageFrame.Format;
                        this.colorImageData = new byte[colorImageFrame.PixelDataLength];

                        this.colorImageWritableBitmap = new WriteableBitmap(
                            colorImageFrame.Width, colorImageFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                        ColorImage.Source = this.colorImageWritableBitmap;
                    }

                    colorImageFrame.CopyPixelDataTo(this.colorImageData);
                    newColorImageData = new byte[this.colorImageData.Length];
                    int tempMinDepth = (int)(minDepth + currentDepth);
                    int tempMaxDepth = (int)(minDepth + currentDepth + depthRange);
                    for (int i = 0; i < colorImageFrame.Width; ++i)
                    {
                        int srcX = i / ratW;
                        for (int j = 0; j < colorImageFrame.Height; ++j)
                        {
                            int srcY = j / ratH;
                            int srcPixel = srcX + 2 + ((srcY - 15) * depthImageFrame.Width);
                            int tgtPixel = (i + (j * colorImageFrame.Width));
                            int l = depthPixels.Length;
                            
                            if (srcPixel >= 0 && srcPixel < l)
                            {
                                //      currentDepth = currentDepth + .00001;
                                short depth = depthPixels[(int)srcPixel].Depth;
                                /*
                                if (depth < tempMinDepth)
                                {
                                    //changePixel(tgtPixel, 0);
                                    int index = tgtPixel * 4;
                                    this.colorImageData[index++] = 0;
                                    this.colorImageData[index++] = 0;
                                    this.colorImageData[index++] = 0;
                                    //changePixel(tgtPixel, new byte[]{255, 255, 255});
                                }
                                //else if (depth > maxDepth)
                                else if (depth > tempMaxDepth)
                                {
                                   // changePixel(tgtPixel, 0);
                                    int index = tgtPixel * 4;
                                    this.colorImageData[index++] = 0;
                                    this.colorImageData[index++] = 0;
                                    this.colorImageData[index++] = 0;
                                }
                                else
                                {
                                    //changePixel(tgtPixel, 0);
                                }
                                if (currentDepth + depthRange >= maxDepth + 300)
                                {
                                    currentDepth = minDepth;
                                }
                                */
                                if (depth > tempMinDepth && depth < tempMaxDepth)
                                {
                                    int index = tgtPixel * 4;
                                    newColorImageData[index] = this.colorImageData[index++];
                                    newColorImageData[index] = this.colorImageData[index++];
                                    newColorImageData[index] = this.colorImageData[index++];
                                }
                            }
                        }
                    }
                    /*
                    this.colorImageWritableBitmap.WritePixels(
                        new Int32Rect(0, 0, colorImageFrame.Width, colorImageFrame.Height),
                        this.colorImageData,
                        colorImageFrame.Width * Bgr32BytesPerPixel,
                        0);
                    // Write the pixel data into our bitmap
                    this.foregroundBitmap.WritePixels(
                        new Int32Rect(0, 0, this.foregroundBitmap.PixelWidth, this.foregroundBitmap.PixelHeight),
                        backgroundRemovedFrame.GetRawPixelData(),
                        this.foregroundBitmap.PixelWidth * sizeof(int),
                        0);
                    */
                    this.colorImageWritableBitmap.WritePixels(
                        new Int32Rect(0, 0, colorImageFrame.Width, colorImageFrame.Height),
                        this.newColorImageData,
                        colorImageFrame.Width * sizeof(int),
                        0);
                }
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }
                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }
                //    if (skeltImageFrame != null)
                //    {
                //        skeltImageFrame.Dispose();
                //    }
            }
          //  Console.WriteLine(s);
         //   ChoosePrivacySkeleton();
        }
Beispiel #35
0
        /// <summary>
        /// AllFramesReady handler: renders the color stream to ColorImage, then runs the
        /// face tracker on the first tracked skeleton and updates the head-pose state
        /// counters (state[]) and the nod/shake counters from the face rotation angles.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Frame data for the color, depth and skeleton streams.</param>
        private void FaceTrackOperation(object sender, AllFramesReadyEventArgs e)
        {
            using (var colorImageFrame = e.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }

                // Make a copy of the color frame for displaying.
                var haveNewFormat = this.currentColorImageFormat != colorImageFrame.Format;
                if (haveNewFormat)
                {
                    // Stream format changed: reallocate the pixel buffer and target bitmap.
                    this.currentColorImageFormat = colorImageFrame.Format;
                    this.colorImageData = new byte[colorImageFrame.PixelDataLength];
                    this.colorImageWritableBitmap = new WriteableBitmap(
                        colorImageFrame.Width, colorImageFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                    ColorImage.Source = this.colorImageWritableBitmap;
                }

                colorImageFrame.CopyPixelDataTo(this.colorImageData);
                this.colorImageWritableBitmap.WritePixels(
                    new Int32Rect(0, 0, colorImageFrame.Width, colorImageFrame.Height),
                    this.colorImageData,
                    colorImageFrame.Width * Bgr32BytesPerPixel,
                    0);
            }

            // NOTE(review): the color frame is opened a second time here solely to fill
            // colorPixelData for the face tracker; the colorImageData copy above could
            // probably be reused instead of re-opening the frame.
            using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                    return;
                colorImageFrame.CopyPixelDataTo(colorPixelData);
            }

            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            {
                if (depthImageFrame == null)
                    return;
                depthImageFrame.CopyPixelDataTo(depthPixelData);
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                    return;
                skeletonFrame.CopySkeletonDataTo(skeletonData);
            }

            // Face tracking needs a tracked skeleton; bail out when nobody is tracked.
            var skeleton = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);
            if (skeleton == null)
                return;

            FaceTrackFrame faceFrame = faceTracker.Track(sensorChooser.Kinect.ColorStream.Format, colorPixelData,
                                            sensorChooser.Kinect.DepthStream.Format, depthPixelData,
                                            skeleton);

            // If a face is tracked, then we can use it.
            if (faceFrame.TrackSuccessful)
            {
                // Retrieve only the Animation Units coeffs.

                //CanvasTranslate.X = faceFrame.Translation.X;
                //CanvasTranslate.Y = faceFrame.Translation.Y;
                Vector3DF faceRotation = faceFrame.Rotation;

                var AUCoeff = faceFrame.GetAnimationUnitCoefficients();

                // Animation-unit coefficients; only the head rotation is used below.
                var jawLower = AUCoeff[AnimationUnit.JawLower];
                var BrowLower = AUCoeff[AnimationUnit.BrowLower];
                var BrowUpper = AUCoeff[AnimationUnit.BrowRaiser];
                var lcd = AUCoeff[AnimationUnit.LipCornerDepressor];
                var lipRaiser = AUCoeff[AnimationUnit.LipRaiser];
                var lipStrectch = AUCoeff[AnimationUnit.LipStretcher];
                // Head rotation relative to the calibrated neutral pose (mid*), in degrees.
                var Pitch = faceRotation.X - midPitch;
                var Yaw = faceRotation.Y - midYaw;
                var Roll = faceRotation.Z - midRoll;

                // NOTE(review): Pitch/Yaw/Roll already have the mid* offsets subtracted
                // above, yet every comparison below offsets by mid* again
                // (e.g. Pitch < midPitch + 10), so the calibration offset is applied
                // twice -- confirm this is intentional.
                if (Pitch < midPitch + 10 && Pitch > midPitch - 10)
                {
                    // Head level vertically: clear the nod flag, credit the neutral bucket.
                    isHeadDown = 0;
                    state[1] += 3;
                }
                if (Roll < midRoll + 10 && Roll > midRoll - 10)
                {
                    // No sideways tilt.
                    isTilt = 0;
                    state[1]++;
                }
                if (Yaw < midYaw + 10 && Yaw > midYaw - 10)
                {
                    // Facing the sensor (no sideways turn).
                    isHeadAway = 0;
                    state[1]++;
                }

                if (Pitch > midPitch+10)
                {
                    // Head moved down; count a nod only on the transition from not-down.
                    if (isHeadDown <= 0) timesNodded++;
                    isHeadDown = 1;
                    state[0]+=3;

                }
                if (Pitch < midPitch-10)
                {
                    // Head moved up; count a nod only on the transition from not-up.
                    if (isHeadDown >= 0) timesNodded++;
                    isHeadDown = -1;
                    state[2]+=3;
                    //timesNodded++;
                }

                if (Yaw > midYaw+10 )
                {
                    // Head turned one way; count a shake only on the direction change.
                    if (isHeadAway <= 0) timesShook++;
                    isHeadAway = 1;
                    state[3]+=3;
                    //timesShook++;
                }
                if (Yaw < midYaw-10 )
                {
                    // Head turned the other way.
                    if (isHeadAway >= 0) timesShook++;
                    isHeadAway = -1;
                    //timesShook++;
                    state[5]+=3;
                }

                if (Roll > midRoll+10)
                {
                    // Tilted to one side (no counter, only state credit).
                    state[6]+=3;
                    isTilt = 1;
                    //timesShook++;
                }
                if (Roll < midRoll-10)
                {
                    // Tilted to the other side.
                    state[8]+=3;
                    isTilt = -1;
                    //timesShook++;
                }

                //textBox2.Text = "P: " + ((float)Pitch).ToString() + " Y: " + ((float)Yaw).ToString() + " R: " + ((float)Roll).ToString();
                //textBox2.Text = "JL: " + ((float)jawLower).ToString() + " BL: " + ((float)BrowLower).ToString() + " BU: " + ((float)BrowUpper).ToString();
                //dataToBeSent3 = "lcd: " + ((float)lcd).ToString() + " LR: " + ((float)lipRaiser).ToString() + " LS: " + ((float)lipStrectch).ToString();

                //pitch(nod) - +-15 roll(tilt) - +-20 yaw(shaking offs- -10) +-15
                //textBox2.Text += " isTilt " + isTilt + " isHeadAway " + isHeadAway + " isHeadDown " + isHeadDown;
            }
        }
Beispiel #36
0
        /// <summary>
        /// Event handler registered for both the DepthStream and the SkeletonStream.
        /// For every fully tracked skeleton it maps the 20 joint positions into the
        /// depth image, centers a pre-created rectangle on each joint, and draws the
        /// five polylines (spine, both arms, both legs) that form the stick figure.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Frame data for the depth and skeleton streams.</param>
        private void nui_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Dispose the skeleton frame deterministically; the original code never
            // disposed it, which leaks the frame back to the sensor's frame pool.
            using (SkeletonFrame sf = e.OpenSkeletonFrame())
            {
                // No skeleton frame means nobody is recognized, so there is nothing
                // to draw: the joint boxes and connecting lines stay as they are.
                if (sf == null)
                {
                    return;
                }

                // Copy the skeleton data out of the frame before it is disposed.
                Skeleton[] skeletonData = new Skeleton[sf.SkeletonArrayLength];
                sf.CopySkeletonDataTo(skeletonData);

                using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
                {
                    if (depthImageFrame == null)
                    {
                        return;
                    }

                    foreach (Skeleton sd in skeletonData)
                    {
                        // Only skeletons in the Tracked state carry usable joint data.
                        if (sd.TrackingState != SkeletonTrackingState.Tracked)
                        {
                            continue;
                        }

                        const int nMax = 20; // Kinect v1 tracks 20 joint points.

                        // Fetch each joint from the tracked skeleton.
                        Joint[] joints = new Joint[nMax];
                        for (int j = 0; j < nMax; j++)
                        {
                            joints[j] = sd.Joints[(JointType)j];
                        }

                        // Project each joint into depth-image space, then scale to the
                        // on-screen user image. NOTE(review): the +340 X offset appears
                        // to position the overlay relative to user_img in the layout --
                        // confirm against the XAML.
                        Point[] points = new Point[nMax];
                        for (int j = 0; j < nMax; j++)
                        {
                            DepthImagePoint depthPoint = depthImageFrame.MapFromSkeletonPoint(joints[j].Position);
                            points[j] = new Point((int)((user_img.Width * depthPoint.X / depthImageFrame.Width) + 340),
                                                  (int)(user_img.Height * depthPoint.Y / depthImageFrame.Height));
                        }

                        // Center the pre-created rectangle on each joint and show it.
                        for (int j = 0; j < nMax; j++)
                        {
                            m_rect[j].Visibility = Visibility.Visible;
                            Canvas.SetTop(m_rect[j], points[j].Y - (m_rect[j].Height / 2));
                            Canvas.SetLeft(m_rect[j], points[j].X - (m_rect[j].Width / 2));
                        }

                        // Spine: hip center -> spine -> shoulder center -> head.
                        PointCollection pc0 = new PointCollection(4);
                        pc0.Add(points[(int)JointType.HipCenter]);
                        pc0.Add(points[(int)JointType.Spine]);
                        pc0.Add(points[(int)JointType.ShoulderCenter]);
                        pc0.Add(points[(int)JointType.Head]);
                        m_poly[0].Points     = pc0;
                        m_poly[0].Visibility = Visibility.Visible;

                        // Left arm: shoulder center -> left shoulder -> elbow -> wrist -> hand.
                        PointCollection pc1 = new PointCollection(5);
                        pc1.Add(points[(int)JointType.ShoulderCenter]);
                        pc1.Add(points[(int)JointType.ShoulderLeft]);
                        pc1.Add(points[(int)JointType.ElbowLeft]);
                        pc1.Add(points[(int)JointType.WristLeft]);
                        pc1.Add(points[(int)JointType.HandLeft]);
                        m_poly[1].Points     = pc1;
                        m_poly[1].Visibility = Visibility.Visible;

                        // Right arm: shoulder center -> right shoulder -> elbow -> wrist -> hand.
                        PointCollection pc2 = new PointCollection(5);
                        pc2.Add(points[(int)JointType.ShoulderCenter]);
                        pc2.Add(points[(int)JointType.ShoulderRight]);
                        pc2.Add(points[(int)JointType.ElbowRight]);
                        pc2.Add(points[(int)JointType.WristRight]);
                        pc2.Add(points[(int)JointType.HandRight]);
                        m_poly[2].Points     = pc2;
                        m_poly[2].Visibility = Visibility.Visible;

                        // Left leg: hip center -> left hip -> knee -> ankle -> foot.
                        PointCollection pc3 = new PointCollection(5);
                        pc3.Add(points[(int)JointType.HipCenter]);
                        pc3.Add(points[(int)JointType.HipLeft]);
                        pc3.Add(points[(int)JointType.KneeLeft]);
                        pc3.Add(points[(int)JointType.AnkleLeft]);
                        pc3.Add(points[(int)JointType.FootLeft]);
                        m_poly[3].Points     = pc3;
                        m_poly[3].Visibility = Visibility.Visible;

                        // Right leg: hip center -> right hip -> knee -> ankle -> foot.
                        PointCollection pc4 = new PointCollection(5);
                        pc4.Add(points[(int)JointType.HipCenter]);
                        pc4.Add(points[(int)JointType.HipRight]);
                        pc4.Add(points[(int)JointType.KneeRight]);
                        pc4.Add(points[(int)JointType.AnkleRight]);
                        pc4.Add(points[(int)JointType.FootRight]);
                        m_poly[4].Points     = pc4;
                        m_poly[4].Visibility = Visibility.Visible;
                    }
                }
            }
        }
Beispiel #37
0
        /// <summary>
        /// AllFramesReady handler driving a serial-controlled robot: copies the color,
        /// depth and skeleton frames, tracks the first skeleton and its face, then
        /// writes single-character commands to the serial port based on hand gestures,
        /// user distance/angle and smile detection.
        /// NOTE(review): the single-letter serial commands ("s", "f", "l", "r", ...)
        /// form a device-specific protocol; their meanings below are inferred from the
        /// surrounding comments only -- confirm against the firmware.
        /// </summary>
        void kinectSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Retrieve each single frame and copy the data
            using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }
                colorImageFrame.CopyPixelDataTo(colorPixelData);
                //int strade = colorImageFrame.Width * 4;
                //image1.Source = BitmapSource.Create(colorImageFrame.Width, colorImageFrame.Height, 96, 96,
                //                                    PixelFormats.Bgr32, null, colorPixelData, strade);
            }

            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            {
                if (depthImageFrame == null)
                {
                    return;
                }
                depthImageFrame.CopyPixelDataTo(depthPixelData);
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }
                skeletonFrame.CopySkeletonDataTo(skeletonData);
            }

            // Retrieve the first tracked skeleton if any. Otherwise, do nothing.
            var skeleton = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);

            // User left the view: send stop/reset commands once and close the session.
            if (skeleton == null && !sessionClose)
            {
                serialP.WriteLine("s");
                serialP.WriteLine("c");
                serialP.WriteLine("p");
                serialP.WriteLine("g");
                if (isActive)
                {
                    isActive = false;
                }

                // Reset all gesture/following state for the next user.
                slejenie           = false;
                activatorRightHand = 0;
                activatorLeftHand  = false;
                firstMeet          = false;

                sessionClose = true;
                return;
            }
            else if (skeleton != null && !firstMeet)
            {
                // First time this user appears: greet ("i" command + greeting sound).
                serialP.WriteLine("i");
                playsound(comms[0]);
                firstMeet    = true;
                sessionClose = false;
            }
            if (sessionClose)
            {
                return;
            }
            // Make the faceTracker processing the data.
            FaceTrackFrame faceFrame = faceTracker.Track(kinectSensor.ColorStream.Format, colorPixelData,
                                                         kinectSensor.DepthStream.Format, depthPixelData,
                                                         skeleton);

            EnumIndexableCollection <FeaturePoint, PointF> facePoints = faceFrame.GetProjected3DShape();


            // points of hands and shoulder - to determine HELLO, etc.
            Joint shoulderCenter = skeleton.Joints[JointType.ShoulderCenter];
            Joint head           = skeleton.Joints[JointType.Head];
            Joint rightHand      = skeleton.Joints[JointType.HandRight];
            Joint leftHand       = skeleton.Joints[JointType.HandLeft];

            // initialize sound for hello
            //SoundPlayer a = new SoundPlayer("C:\\sal.wav");


            // open stream for uart reading
            //serialP.Open();

            // points of lip's corner - with help of this I determine smile
            // NOTE(review): feature-point indices 88/89 are assumed to be the lip
            // corners of the projected 3D face shape -- confirm against the SDK docs.
            double x1 = facePoints[88].X;
            double y1 = facePoints[88].Y;

            System.Windows.Point leftLip = new System.Windows.Point(x1, y1);
            double x2 = facePoints[89].X;
            double y2 = facePoints[89].Y;

            System.Windows.Point rightLip = new System.Windows.Point(x2, y2);
            Vector subtr = System.Windows.Point.Subtract(leftLip, rightLip);

            // distance between kinect and human (centimeters: meters * 100)
            distance = skeleton.Position.Z * 100;

            // distance between two corners of lip (projected pixels)
            double length = Math.Sqrt(subtr.X * subtr.X + subtr.Y * subtr.Y);

            // Smile threshold on lip-corner distance; tightened as the user moves away.
            int check = 100;

            // Angular dead-zone bounds; widened as the user moves away.
            double angle1 = 0d;
            double angle2 = 0d;
            double angle  = skeleton.Position.X * 100;

            #region "Smile deterine"
            if (distance >= 95 && distance < 110)
            {
                check = 22;
            }
            else if (distance >= 110 && distance < 120)
            {
                check = 19;
            }
            else if (distance >= 120 && distance < 130)
            {
                check = 18;
            }
            else if (distance >= 130 && distance < 140)
            {
                check = 17;
            }
            else if (distance >= 140 && distance < 150)
            {
                check = 16;
            }
            else if (distance >= 150 && distance < 160)
            {
                check = 14;
            }
            else if (distance >= 160 && distance < 170)
            {
                check = 13;
            }
            else if (distance >= 170 && distance < 180)
            {
                check = 12;
            }
            else if (distance >= 180 && distance < 190)
            {
                check = 11;
            }

            #endregion

            #region "Angle"
            if (distance >= 90 && distance < 110)
            {
                angle1 = -15;
                angle2 = 15;
            }
            else if (distance >= 110 && distance < 150)
            {
                angle1 = -20;
                angle2 = 20;
            }
            else if (distance >= 150 && distance < 170)
            {
                angle1 = -30;
                angle2 = 30;
            }
            else if (distance >= 170 && distance < 200)
            {
                angle1 = -35;
                angle2 = 35;
            }
            else if (distance >= 200)
            {
                angle1 = -40;
                angle2 = 40;
            }
            #endregion

            // Depth gap (cm) between each hand and the shoulder center: a large gap
            // means the hand is stretched forward/up toward the sensor.
            double condition1 = Math.Abs(leftHand.Position.Z * 100 - shoulderCenter.Position.Z * 100);
            double condition2 = Math.Abs(rightHand.Position.Z * 100 - shoulderCenter.Position.Z * 100);

            // If position of two hands higher than shoulder it's activate 'slejenie
            // za ob'ektom' (object-following mode).
            if (condition1 > 45 &&
                condition2 > 45 &&
                leftHand.Position.X < rightHand.Position.X)
            {
                if (!slejenie)
                {
                    // Lock in the current distance as the follow set-point.
                    isActive       = true;
                    FIXED_DISTANCE = distance;
                    slejenie       = true;
                }
            }

            // The command to stop following (hands crossed).
            if (leftHand.Position.X > rightHand.Position.X)
            {
                isActive = false;
            }

            // Object-following mode: steer toward the user until back in range.
            if (isActive)
            {
                int pinkIs   = (int)typeCondition.THIRD;
                int purpleIs = (int)typeCondition.FORTH;
                int redIs    = (int)typeCondition.FIVTH;
                int yellowIs = (int)typeCondition.SIXTH;

                // User drifted too far away: drive toward them.
                if (distance > FIXED_DISTANCE + 10.0d)
                {
                    if (angle < angle1)
                    {
                        ellipseSmile.Fill = Brushes.Pink;
                        if (currentAction != pinkIs)// turn right
                        {
                            currentAction = pinkIs;
                            serialP.WriteLine("r");
                        }
                    }
                    else if (angle > angle2)// turn left
                    {
                        ellipseSmile.Fill = Brushes.Purple;
                        if (currentAction != purpleIs)
                        {
                            currentAction = purpleIs;
                            serialP.WriteLine("l");
                        }
                    }
                    else
                    {
                        ellipseSmile.Fill = Brushes.Red;
                        if (currentAction != redIs)// forward
                        {
                            currentAction = redIs;
                            serialP.WriteLine("f");
                        }
                    }
                }
                else if (distance > 90)
                {
                    // In range: only rotate to keep facing the user.
                    if (angle < angle1)
                    {
                        ellipseSmile.Fill = Brushes.Pink;
                        if (currentAction != pinkIs)// turn right
                        {
                            currentAction = pinkIs;
                            serialP.WriteLine("r");
                        }
                    }
                    else if (angle > angle2)
                    {
                        ellipseSmile.Fill = Brushes.Purple;
                        if (currentAction != purpleIs)// turn left
                        {
                            currentAction = purpleIs;
                            serialP.WriteLine("l");
                        }
                    }
                    else
                    {
                        ellipseSmile.Fill = Brushes.Yellow;
                        if (currentAction != yellowIs)// stop, i.e. do nothing
                        {
                            currentAction = yellowIs;
                            serialP.WriteLine("s");
                        }
                    }
                }
                else
                {
                    // Too close: stop.
                    ellipseSmile.Fill = Brushes.Yellow;
                    if (currentAction != yellowIs)// stop, i.e. do nothing
                    {
                        currentAction = yellowIs;
                        serialP.WriteLine("s");
                    }
                }
            }


            // Following mode is off: react to greetings, smiles and head turns.
            else if (!isActive)
            {
                int blueIs  = (int)typeCondition.FIRST;
                int blackIs = (int)typeCondition.SECOND;
                int onkol   = (int)typeCondition.SEVENTH;

                if (leftHand.Position.Y > head.Position.Y && rightHand.Position.Y < shoulderCenter.Position.Y)
                {
                    ellipseSmile.Fill = Brushes.Blue;
                    if (currentAction != blueIs && !activatorLeftHand)// greeting with the left hand ----------------------------------------------------------------------------

                    {
                        currentAction = blueIs;
                        serialP.WriteLine("q");
                        activatorLeftHand = true;
                    }
                }

                else if (rightHand.Position.Y > head.Position.Y && leftHand.Position.Y < shoulderCenter.Position.Y)
                {
                    ellipseSmile.Fill = Brushes.Blue;
                    if (currentAction != onkol && activatorRightHand != 12)// greeting with the right hand   -----------------------------------------------------------------------------
                    {
                        currentAction = onkol;
                        serialP.WriteLine("w");
                        activatorRightHand = 12;
                    }
                }

                else
                {
                    ellipseSmile.Fill = Brushes.Black;
                    if (currentAction != blackIs)// idle / do nothing
                    {
                        currentAction = blackIs;
                        serialP.WriteLine("s");
                    }


                    if (currentAction == blackIs)
                    {
                        // Smile when the lip-corner distance exceeds the threshold.
                        if (length >= check && currentFace != (int)faceConditions.FIRST)
                        {
                            serialP.WriteLine("z"); // smile
                            currentFace       = (int)faceConditions.FIRST;
                            ellipseSmile.Fill = Brushes.Brown;
                        }
                        else if (length < check && currentFace != (int)faceConditions.SECOND)
                        {
                            serialP.WriteLine("x"); // poker face
                            currentFace       = (int)faceConditions.SECOND;
                            ellipseSmile.Fill = Brushes.Gold;
                        }

                        #region "povoroti golovoi"
                        // Head turns: mirror the user's horizontal position with the
                        // robot's head, sending each command only on a state change.
                        if (angle < angle1)
                        {
                            ellipseSmile.Fill = Brushes.Pink;
                            if (!headToRight)// turn the head to the right
                            {
                                headToRight  = true;
                                headToCenter = false;
                                headToLeft   = false;
                                serialP.WriteLine("k");
                            }
                        }
                        else if (angle > angle2)// turn the head to the left
                        {
                            if (!headToLeft)
                            {
                                headToLeft   = true;
                                headToCenter = false;
                                headToRight  = false;
                                serialP.WriteLine("j");
                            }
                        }
                        else if (angle < angle2 && angle > angle1)// head centered
                        {
                            if (!headToCenter)
                            {
                                headToCenter = true;
                                headToRight  = false;
                                headToLeft   = false;
                                serialP.WriteLine("p");
                            }
                        }
                        #endregion
                    }
                    else if (!faceFrame.TrackSuccessful && currentFace != (int)faceConditions.NONE)
                    {
                        serialP.WriteLine("c"); // sad face
                        currentFace       = (int)faceConditions.NONE;
                        ellipseSmile.Fill = Brushes.Chocolate;
                    }
                }
            }

            label2.Content = distance.ToString();
            //label1.Content = (leftHand.Position.Z * 100).ToString();
            //label3.Content = (shoulderCenter.Position.Z * 100).ToString();

            //serialP.Close();
        }
        /// <summary>
        /// Maps the tracked hands of a skeleton onto the color image, positions the
        /// on-screen hand ellipses, and plays/stops notes depending on whether the
        /// right hand is within the depth threshold.
        /// </summary>
        /// <param name="first">The tracked skeleton whose hands are mapped.</param>
        /// <param name="e">Event args used to open the current depth frame.</param>
        void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                // Nothing to do without depth data or a connected sensor.
                if (depthFrame == null || kinectSensorChooser1.Kinect == null)
                {
                    return;
                }

                // Project each hand joint into depth space.
                DepthImagePoint leftHandDepth =
                    depthFrame.MapFromSkeletonPoint(first.Joints[JointType.HandLeft].Position);
                DepthImagePoint rightHandDepth =
                    depthFrame.MapFromSkeletonPoint(first.Joints[JointType.HandRight].Position);

                // Project the depth-space points into color-image space.
                ColorImagePoint leftHandColor =
                    depthFrame.MapToColorImagePoint(leftHandDepth.X, leftHandDepth.Y,
                                                    ColorImageFormat.RgbResolution640x480Fps30);
                ColorImagePoint rightHandColor =
                    depthFrame.MapToColorImagePoint(rightHandDepth.X, rightHandDepth.Y,
                                                    ColorImageFormat.RgbResolution640x480Fps30);

                // Move the hand markers to the mapped color coordinates.
                CameraPosition(leftEllipse, leftHandColor);
                CameraPosition(rightEllipse, rightHandColor);

                // Depth-gate each hand against the 1250 threshold; "valid" means the
                // hand is close enough to trigger notes.
                bool leftValid;
                bool rightValid;
                leftEllipse  = doDepthCalculation(leftEllipse, leftHandDepth, 1250, true, out leftValid);
                rightEllipse = doDepthCalculation(rightEllipse, rightHandDepth, 1250, false, out rightValid);

                if (rightValid)
                {
                    // Play the note under the right hand and silence every other one.
                    int activeIndex = -1;
                    NoteRectangle activeNote = getNoteRectangleAtPoint(rightHandColor, false, out activeIndex);
                    activeNote.PlayNote();
                    activeNote.Rectangle.Fill = Brushes.Green;
                    rightHandLabel.Content    = activeNote.Pitch.ToString();
                    for (int i = 0; i < rightNotes.Count; i++)
                    {
                        if (i == activeIndex)
                        {
                            continue;
                        }
                        rightNotes[i].StopNote();
                        rightNotes[i].Rectangle.Fill = null;
                    }
                }
                else
                {
                    // Right hand is out of range: silence everything.
                    foreach (NoteRectangle note in rightNotes)
                    {
                        note.StopNote();
                    }
                    int ignoredIndex = -1;
                    getNoteRectangleAtPoint(rightHandColor, false, out ignoredIndex).StopNote();
                }
            }
        }
Beispiel #39
0
        /// <summary>
        /// Maps the squat-relevant joints of the tracked skeleton into depth and
        /// color space, positions the overlay shapes, computes the KASR metric and
        /// knee angles, and records the shoulder-center height for later analysis.
        /// </summary>
        /// <param name="first">The tracked skeleton to analyse.</param>
        /// <param name="e">Event args used to open the current depth frame.</param>
        void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depth = e.OpenDepthImageFrame())
            {
                // Nothing to do without depth data or a connected sensor.
                if (depth == null || kinectSensorChooser1.Kinect == null)
                {
                    return;
                }

                KinectSensor sensor = kinectSensorChooser1.Kinect;
                CoordinateMapper mapper = sensor.CoordinateMapper;
                DepthImageFormat depthFormat = sensor.DepthStream.Format;

                // Persist the raw joint coordinates to file.
                SaveCoordinatesAssistant(first, sensor, depthFormat);

                // Skeleton space -> depth space for every joint of interest.
                DepthImagePoint headDepth       = mapper.MapSkeletonPointToDepthPoint(first.Joints[JointType.Head].Position, depthFormat);
                DepthImagePoint leftKneeDepth   = mapper.MapSkeletonPointToDepthPoint(first.Joints[JointType.KneeLeft].Position, depthFormat);
                DepthImagePoint rightKneeDepth  = mapper.MapSkeletonPointToDepthPoint(first.Joints[JointType.KneeRight].Position, depthFormat);
                DepthImagePoint leftAnkleDepth  = mapper.MapSkeletonPointToDepthPoint(first.Joints[JointType.AnkleLeft].Position, depthFormat);
                DepthImagePoint rightAnkleDepth = mapper.MapSkeletonPointToDepthPoint(first.Joints[JointType.AnkleRight].Position, depthFormat);
                DepthImagePoint leftHipDepth    = mapper.MapSkeletonPointToDepthPoint(first.Joints[JointType.HipLeft].Position, depthFormat);
                DepthImagePoint rightHipDepth   = mapper.MapSkeletonPointToDepthPoint(first.Joints[JointType.HipRight].Position, depthFormat);
                // Shoulder center is used to check for start, bottom, and end of squat.
                DepthImagePoint shoulderCenterDepth = mapper.MapSkeletonPointToDepthPoint(first.Joints[JointType.ShoulderCenter].Position, depthFormat);

                // Depth space -> color space so the overlay lines up with the RGB image.
                // (The hips are only needed for the KASR computation, not for display.)
                ColorImagePoint headColor       = mapper.MapDepthPointToColorPoint(depthFormat, headDepth, ColorImageFormat.RgbResolution640x480Fps30);
                ColorImagePoint leftKneeColor   = mapper.MapDepthPointToColorPoint(depthFormat, leftKneeDepth, ColorImageFormat.RgbResolution640x480Fps30);
                ColorImagePoint rightKneeColor  = mapper.MapDepthPointToColorPoint(depthFormat, rightKneeDepth, ColorImageFormat.RgbResolution640x480Fps30);
                ColorImagePoint leftAnkleColor  = mapper.MapDepthPointToColorPoint(depthFormat, leftAnkleDepth, ColorImageFormat.RgbResolution640x480Fps30);
                ColorImagePoint rightAnkleColor = mapper.MapDepthPointToColorPoint(depthFormat, rightAnkleDepth, ColorImageFormat.RgbResolution640x480Fps30);
                ColorImagePoint shoulderCenterColor = mapper.MapDepthPointToColorPoint(depthFormat, shoulderCenterDepth, ColorImageFormat.RgbResolution640x480Fps30);

                // Move the overlay shapes to the mapped color coordinates.
                CameraPosition(headImage, headColor);
                CameraPosition(leftKneeEllipse, leftKneeColor);
                CameraPosition(rightKneeEllipse, rightKneeColor);
                CameraPosition(leftAnkleEllipse, leftAnkleColor);
                CameraPosition(rightAnkleEllipse, rightAnkleColor);
                CameraPosition(shoulderCenterEllipse, shoulderCenterColor);

                // KASR metric and knee angles.
                // NOTE(review): these results are not consumed anywhere in this method;
                // the calls are kept in case the helpers have side effects — confirm.
                double kasr = CalculateKASR(rightAnkleDepth, leftAnkleDepth, rightKneeDepth,
                                            leftKneeDepth, rightHipDepth, leftHipDepth);
                double leftKneeAngle  = AngleBetweenJoints(first.Joints[JointType.HipLeft], first.Joints[JointType.KneeLeft], first.Joints[JointType.AnkleLeft]);
                double rightKneeAngle = AngleBetweenJoints(first.Joints[JointType.HipRight], first.Joints[JointType.KneeRight], first.Joints[JointType.AnkleRight]);

                // Remember the shoulder height for squat-phase detection across frames.
                prevShoulderCenters.Add(shoulderCenterDepth.Y);
            }
        }
        /// <summary>
        /// Handles the sensor's AllFramesReady event for face tracking: copies the
        /// color, depth and skeleton data into reusable buffers, feeds each tracked
        /// (or position-only) skeleton to its face tracker, prunes stale trackers and
        /// triggers a redraw. All three frames must be present; partial events are
        /// ignored. Frames are always disposed in the finally block.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="allFramesReadyEventArgs">Provides access to the three frame types.</param>
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame skeletonFrame = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame = allFramesReadyEventArgs.OpenSkeletonFrame();

                // Face tracking needs all three streams; skip incomplete events.
                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.depthImage = null;     // force reallocation below
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.colorImage = null;     // force reallocation below
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information (resize the buffer if the array length changed)
                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                // Update the list of trackers and the trackers with the current frame information
                foreach (Skeleton skeleton in this.skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked
                        || skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // We want to keep a record of any skeleton, tracked or untracked.
                        if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                        {
                            this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                        }

                        // Give each tracker the updated frame.
                        SkeletonFaceTracker skeletonFaceTracker;
                        if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out skeletonFaceTracker))
                        {
                            skeletonFaceTracker.OnFrameReady(this.Kinect, colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);
                            skeletonFaceTracker.LastTrackedFrame = skeletonFrame.FrameNumber;
                        }
                    }
                }

                // Drop trackers for skeletons that have not been seen recently.
                this.RemoveOldTrackers(skeletonFrame.FrameNumber);

                this.InvalidateVisual();
            }
            finally
            {
                // Frames must be disposed even on early return or exception.
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Beispiel #41
0
        /// <summary>
        /// Maps the head and hand joints onto the color image, moves the on-screen
        /// ellipses, and counts how often each hand overlaps the head image to decide
        /// whether the exercise was performed correctly (the "BIEN" labels).
        /// </summary>
        /// <param name="first">The tracked skeleton.</param>
        /// <param name="e">Event args used to open the current depth frame.</param>
        void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depth = e.OpenDepthImageFrame())
            {
                // Nothing to do without depth data or a connected sensor wrapper.
                if (depth == null || mKinect == null)
                {
                    return;
                }

                // Skeleton space -> depth space for head and both hands.
                DepthImagePoint headDepthPoint = mKinect.Kinect.CoordinateMapper.MapSkeletonPointToDepthPoint(first.Joints[JointType.Head].Position, DepthImageFormat.Resolution640x480Fps30);

                DepthImagePoint leftHandDepthPoint = mKinect.Kinect.CoordinateMapper.MapSkeletonPointToDepthPoint(first.Joints[JointType.HandLeft].Position, DepthImageFormat.Resolution640x480Fps30);

                DepthImagePoint rightHandDepthPoint = mKinect.Kinect.CoordinateMapper.MapSkeletonPointToDepthPoint(first.Joints[JointType.HandRight].Position, DepthImageFormat.Resolution640x480Fps30);

                // Depth space -> color space (1280x960 RGB) so markers line up with the image.
                ColorImagePoint headColorPoint = mKinect.Kinect.CoordinateMapper.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30, headDepthPoint, ColorImageFormat.RgbResolution1280x960Fps12);

                ColorImagePoint leftHandColorPoint = mKinect.Kinect.CoordinateMapper.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30, leftHandDepthPoint, ColorImageFormat.RgbResolution1280x960Fps12);

                ColorImagePoint rightHandColorPoint = mKinect.Kinect.CoordinateMapper.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30, rightHandDepthPoint, ColorImageFormat.RgbResolution1280x960Fps12);

                // Position the markers on screen.
                CameraPosition(ellipseHead, headColorPoint, "HEAD");
                CameraPosition(ellipseHandLeft, leftHandColorPoint, "LEFT HAND");
                CameraPosition(ellipseHandRight, rightHandColorPoint, "RIGHT HAND");


                if (cHandRight == 10) // right hand passed 10 times
                {
                    dHandOnImage = DateTime.Now;
                    Log("----------------------------------------------------------------------------------------------------");
                    Log("headOnImage - MANO DERECHA");

                    CameraPosition(ellipseHead, headColorPoint, "HEAD - cHandRight");
                    CameraPosition(ellipseHandRight, rightHandColorPoint, "RIGHT HAND - cHandRight");
                }

                // Right hand currently overlapping the head image: count a success.
                if (handOnImage(ellipseHandRight, ellipseHead))
                {
                    Log("----------------------------------------------------------------------------------------------------");
                    Log("handOnHead - BIEN - ++ ");
                    cHandRightOnHead++;
                    labelResult.Content    = "BIEN ++: " + cHandRightOnHead;
                    labelResult.Visibility = Visibility.Visible;
                }

                // Ten right-hand successes: show the final "BIEN" result.
                if (cHandRightOnHead == 10)
                {
                    Log("BIEN");
                    labelResult2.Content       = "BIEN";
                    labelResult2.Visibility    = Visibility.Visible;
                    imageHeadInBody.Visibility = Visibility.Visible;
                    imageHead.Visibility       = Visibility.Hidden;
                }

                Log("cHandRightOnHead" + cHandRightOnHead.ToString());

                // Left-hand counter. NOTE(review): the original comment said
                // "MANO DERECHA" (right hand) and the log/label strings below still
                // say "RIGHT HAND"/"MANO DERECHA" — this whole branch looks
                // copy/pasted from the right-hand case; confirm intent before
                // changing the runtime strings.
                if (cHandLeft == 10) // left hand passed 10 times (see note above)
                {
                    dHandOnImage = DateTime.Now;
                    Log("----------------------------------------------------------------------------------------------------");
                    Log("headOnImage - MANO DERECHA");

                    CameraPosition(ellipseHead, headColorPoint, "HEAD - cHandRight");
                    CameraPosition(ellipseHandLeft, leftHandColorPoint, "RIGHT HAND - cHandRight");
                }

                // Left hand currently overlapping the head image: count a success.
                if (handOnImage(ellipseHandLeft, ellipseHead))
                {
                    Log("----------------------------------------------------------------------------------------------------");
                    Log("handOnHead - BIEN - ++ ");
                    cHandLeftOnHead++;
                    labelResult.Content    = "BIEN ++: " + cHandLeftOnHead;
                    labelResult.Visibility = Visibility.Visible;
                }

                // Ten left-hand successes: show the final "BIEN" result.
                if (cHandLeftOnHead == 10)
                {
                    Log("BIEN");
                    labelResult2.Content       = "BIEN";
                    labelResult2.Visibility    = Visibility.Visible;
                    imageHeadInBody.Visibility = Visibility.Visible;
                    imageHead.Visibility       = Visibility.Hidden;
                }
            }
        }
Beispiel #42
0
        /// <summary>
        /// Main game loop and logic, driven by the sensor's AllFramesReady event
        /// (~30 fps) instead of a timer: paints the RGB camera image, colors the
        /// player silhouette from the depth stream, draws hand/foot trails, and
        /// updates the skeleton-driven game objects.
        /// </summary>
        private void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Fresh (zeroed) depth buffer for this frame. If the depth frame is
            // missing below, the silhouette simply renders black.
            // (The original allocated this array a second time inside the depth
            // block with nothing written in between; the duplicate was removed.)
            depthImagePixels = new DepthImagePixel[sensor.DepthStream.FramePixelDataLength];

            // On MotionCanvas paint the input from the regular RGB camera.
            using (var frame = e.OpenColorImageFrame())
            {
                if (frame == null)
                {
                    return;
                }
                var bitmap = CreateBitmap(frame);
                MotionCanvas.Background = new ImageBrush(bitmap);
            }

            // On ImageCanvas color the player silhouette / everything else remains black.
            using (var frame = e.OpenDepthImageFrame())
            {
                // Guard against a missing depth frame instead of letting the copy
                // throw a NullReferenceException to be swallowed by the catch.
                if (frame != null)
                {
                    try
                    {
                        frame.CopyDepthImagePixelDataTo(depthImagePixels);
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine(ex);
                    }
                }
                var colorPixels = new byte[4 * sensor.DepthStream.FramePixelDataLength];      // 4 bytes per pixel (BGRA)

                for (int i = 0; i < colorPixels.Length; i += 4)
                {
                    // PlayerIndex != 0 means this depth pixel belongs to a player.
                    if (depthImagePixels[i / 4].PlayerIndex != 0)
                    {
                        if (r > 10 || g > 10 || b > 10)//slider for custom rgb color, by default green
                        {
                            colorPixels[i + 2] = (byte)r;
                            colorPixels[i + 1] = (byte)g;
                            colorPixels[i + 0] = (byte)b;
                        }
                        else
                        {
                            // Default: light up a single channel ("spectre" selects which).
                            colorPixels[i + spectre] = 255;
                        }
                    }
                }
                ImageCanvas.Background = new ImageBrush(colorPixels.ToBitmapSource(640, 480));

                //on MotionCanvas draws the polylines to leave color traces
                if (isDrawing == true)
                {
                    trail1.Points.Add(new Point {
                        X = leftHandPoint.X - 70, Y = leftHandPoint.Y - 20
                    });
                    trail2.Points.Add(new Point {
                        X = rightHandPoint.X - 150, Y = rightHandPoint.Y - 20
                    });
                    trail3.Points.Add(new Point {
                        X = leftFootPoint.X - 70, Y = leftFootPoint.Y - 20
                    });
                    trail4.Points.Add(new Point {
                        X = rightFootPoint.X - 150, Y = rightFootPoint.Y - 20
                    });
                }
                else if (isSet == false && isDrawing == false)
                {
                    // Lazily (re)create the four trail polylines once per drawing session.
                    trail1                 = new Polyline();
                    trail1.Stroke          = System.Windows.Media.Brushes.Red;
                    trail1.StrokeThickness = 5;
                    trail2                 = new Polyline();
                    trail2.Stroke          = System.Windows.Media.Brushes.Blue;
                    trail2.StrokeThickness = 5;
                    trail3                 = new Polyline();
                    trail3.Stroke          = System.Windows.Media.Brushes.Green;
                    trail3.StrokeThickness = 5;
                    trail4                 = new Polyline();
                    trail4.Stroke          = System.Windows.Media.Brushes.HotPink;
                    trail4.StrokeThickness = 5;
                    isSet = true;
                    MotionCanvas.Children.Clear();
                    MotionCanvas.Children.Add(trail1);
                    MotionCanvas.Children.Add(trail2);
                    MotionCanvas.Children.Add(trail3);
                    MotionCanvas.Children.Add(trail4);
                }
            }

            //skeleton tracking
            using (var frame = e.OpenSkeletonFrame())
            {
                if (frame == null)
                {
                    return;
                }
                var skeletons = new Skeleton[frame.SkeletonArrayLength];
                frame.CopySkeletonDataTo(skeletons);
                // Only the first fully tracked skeleton drives the game.
                var skeleton = skeletons.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);
                if (skeleton == null)
                {
                    return;
                }

                var rightHand = skeleton.Joints[JointType.HandRight].Position;
                var leftHand  = skeleton.Joints[JointType.HandLeft].Position;
                var rightLeg  = skeleton.Joints[JointType.FootRight].Position;
                var leftLeg   = skeleton.Joints[JointType.FootLeft].Position;

                // NOTE(review): allocating a CoordinateMapper per frame works, but the
                // sensor's own CoordinateMapper property could be reused instead.
                var mapper      = new CoordinateMapper(sensor);
                var colorPoint  = mapper.MapSkeletonPointToColorPoint(rightHand, ColorImageFormat.RawBayerResolution640x480Fps30);
                var colorPoint2 = mapper.MapSkeletonPointToColorPoint(rightLeg, ColorImageFormat.RawBayerResolution640x480Fps30);
                var colorPoint3 = mapper.MapSkeletonPointToColorPoint(leftLeg, ColorImageFormat.RawBayerResolution640x480Fps30);
                var colorPoint4 = mapper.MapSkeletonPointToColorPoint(leftHand, ColorImageFormat.RawBayerResolution640x480Fps30);

                //hand/feet positions
                leftHandPoint.X = colorPoint4.X;
                leftHandPoint.Y = colorPoint4.Y;

                rightHandPoint.X = colorPoint.X;
                rightHandPoint.Y = colorPoint.Y;

                rightFootPoint.X = colorPoint2.X;
                rightFootPoint.Y = colorPoint2.Y;

                leftFootPoint.X = colorPoint3.X;
                leftFootPoint.Y = colorPoint3.Y;

                var pointList = new List <ColorImagePoint>();
                pointList.Add(colorPoint);
                pointList.Add(colorPoint2);
                pointList.Add(colorPoint3);
                pointList.Add(colorPoint4);

                // Collision/game updates driven by the four mapped extremity points.
                BulletBounce(pointList);

                var circle = CreateCircle(colorPoint);
                LevelStart(colorPoint, circle);

                ProjectileMove();
            }
        }
Beispiel #43
0
        /// <summary>
        /// Called whenever the frames are ready.
        /// Processes every gapBetweenFrames-th frame with the RGBTracker; skipped
        /// frames only refresh the latest colour data for display.
        /// When processed, calls frameReady for post-frame work &amp; visualisation.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="frames">Container for the colour/depth/skeleton frames.</param>
        private void handleNewFrame(object sender, AllFramesReadyEventArgs frames)
        {
            bool run;

            byte[] thisColorFrame;

            // NOTE(review): lock (this) is an anti-pattern (external code can take the
            // same lock); a dedicated private lock object would be safer.
            lock (this)
            {
                this.frameCounter++;
                // NOTE(review): the original condition was
                //   run = ((frameCounter % gapBetweenFrames) == 0 & (tracking = true));
                // `tracking = true` is almost certainly a typo for `tracking == true`,
                // but the assignment side effect is preserved here so behaviour stays
                // identical until the intent is confirmed.
                tracking = true;
                run = (frameCounter % gapBetweenFrames) == 0;
            }

            // Process the frame?
            if (run == true)
            {
                using (ColorImageFrame CIF = frames.OpenColorImageFrame())
                    using (DepthImageFrame DIF = frames.OpenDepthImageFrame())
                        using (SkeletonFrame SF = frames.OpenSkeletonFrame())
                        {
                            // Capture the raw data; skeleton data is optional, images are not.
                            if (CIF != null && DIF != null)
                            {
                                thisColorFrame = new byte[640 * 480 * 4];
                                CIF.CopyPixelDataTo(thisColorFrame);
                                CIF.CopyPixelDataTo(colorFrame);
                                DIF.CopyPixelDataTo(depthFrame);

                                if (SF != null)
                                {
                                    skeletonFrame = new Skeleton[SF.SkeletonArrayLength];
                                    SF.CopySkeletonDataTo(skeletonFrame);

                                    // Count the fully tracked skeletons in this frame.
                                    int skeletons = 0;
                                    foreach (var skeleton in skeletonFrame)
                                    {
                                        if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
                                        {
                                            skeletons++;
                                        }
                                    }
                                    activeSkeletons = skeletons;
                                }
                            }
                            else
                            {
                                // Frames automatically disposed via using statement
                                return;
                            }

                            // Now the frame data has been captured, process it!

                            // Need to use local copies to maintain volatility of actual variables
                            int    tempX     = 0;
                            int    tempY     = 0;
                            double tempAngle = 0;

                            // Track on a private copy so the display buffer is not
                            // mutated while the tracker is working on it.
                            byte[] colorFrame2 = (byte[])thisColorFrame.Clone();

                            tracker.ProcessFrame(colorFrame2, out tempX, out tempY, out tempAngle);

                            // Perform motion smoothing in a function rather than having it all here
                            UpdateCoordinates(tempX, tempY, tempAngle);

                            lock (this)
                            {
                                this.colorFrame = thisColorFrame;
                            }

                            // Post-frame work
                            frameReady();
                        }
            }
            else
            {
                // Skip the heavy processing, but still grab the latest colour frame
                // for display. (The original re-opened the colour frame twice here,
                // leaking both undisposed frames; reusing the already-opened CIF
                // fixes the leak and makes the NullReferenceException catch
                // unnecessary.)
                using (ColorImageFrame CIF = frames.OpenColorImageFrame())
                    using (DepthImageFrame DIF = frames.OpenDepthImageFrame())
                        using (SkeletonFrame SF = frames.OpenSkeletonFrame())
                        {
                            if (CIF != null)
                            {
                                CIF.CopyPixelDataTo(colorFrame);
                            }
                        }

                updateVisualisation();
            }
        }
Beispiel #44
0
        void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depth = e.OpenDepthImageFrame())
            {
                if (depth == null)
                {
                    return;
                }

                //Map a joint location to a point on the depth map
                //head
                DepthImagePoint headDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.Head].Position);
                //left shoulder
                DepthImagePoint leftSDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.ShoulderLeft].Position);
                //right shoulder
                DepthImagePoint rightSDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.ShoulderRight].Position);
                //right elbow
                DepthImagePoint rightEDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.ElbowRight].Position);
                //left elbow
                DepthImagePoint leftEDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.ElbowLeft].Position);
                //right hand
                DepthImagePoint rightHdDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.HandRight].Position);
                //left hand
                DepthImagePoint leftHdDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.HandLeft].Position);
                //spine
                DepthImagePoint spineDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.Spine].Position);
                //right hip
                DepthImagePoint rightHpDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.HipRight].Position);
                //left hip
                DepthImagePoint leftHpDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.HipLeft].Position);
                //hip centre
                DepthImagePoint centreHpDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.HipCenter].Position);
                //right knee
                DepthImagePoint rightKDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.KneeRight].Position);
                //left knee
                DepthImagePoint leftKDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.KneeLeft].Position);
                //right ankle
                DepthImagePoint rightADepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.AnkleRight].Position);
                //left ankle
                DepthImagePoint leftADepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.AnkleLeft].Position);

                //Map a depth point to a point on the color image
                //head
                ColorImagePoint headColorPoint = depth.MapToColorImagePoint(headDepthPoint.X, headDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //left shoulder
                ColorImagePoint leftShoulderColorPoint = depth.MapToColorImagePoint(leftSDepthPoint.X, leftSDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //right shoulder
                ColorImagePoint rightShoulderColorPoint = depth.MapToColorImagePoint(rightSDepthPoint.X, rightSDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //right elbow
                ColorImagePoint rightEColorPoint = depth.MapToColorImagePoint(rightEDepthPoint.X, rightEDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //left eblow
                ColorImagePoint leftEColorPoint = depth.MapToColorImagePoint(leftEDepthPoint.X, leftEDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //right hand
                ColorImagePoint rightHdColorPoint = depth.MapToColorImagePoint(rightHdDepthPoint.X, rightHdDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //left hand
                ColorImagePoint leftHdColorPoint = depth.MapToColorImagePoint(leftHdDepthPoint.X, leftHdDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //spine
                ColorImagePoint spineColorPoint = depth.MapToColorImagePoint(spineDepthPoint.X, spineDepthPoint.Y, ColorImageFormat.RawBayerResolution640x480Fps30);
                //right hip
                ColorImagePoint rightHpColorPoint = depth.MapToColorImagePoint(rightHpDepthPoint.X, rightHpDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //left hip
                ColorImagePoint leftHpColorPoint = depth.MapToColorImagePoint(leftHpDepthPoint.X, leftHpDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //centre hip
                ColorImagePoint centreHpColorPoint = depth.MapToColorImagePoint(centreHpDepthPoint.X, centreHpDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //right knee
                ColorImagePoint rightKColorPoint = depth.MapToColorImagePoint(rightKDepthPoint.X, rightKDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //left knee
                ColorImagePoint leftKColorPoint = depth.MapToColorImagePoint(leftKDepthPoint.X, leftKDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //right ankle
                ColorImagePoint rightAColorPoint = depth.MapToColorImagePoint(rightADepthPoint.X, rightADepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
                //left ankle
                ColorImagePoint leftAColorPoint = depth.MapToColorImagePoint(leftADepthPoint.X, leftADepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);

                //Set Points
                jointPointDict.AddOrUpdate("Head", headColorPoint, (key, oldValue) => headColorPoint);
                jointPointDict.AddOrUpdate("ShoulderLeft", leftShoulderColorPoint, (key, oldValue) => leftShoulderColorPoint);
                jointPointDict.AddOrUpdate("ShoulderRight", rightShoulderColorPoint, (key, oldValue) => rightShoulderColorPoint);
                jointPointDict.AddOrUpdate("ElbowRight", rightEColorPoint, (key, oldValue) => rightEColorPoint);
                jointPointDict.AddOrUpdate("ElbowLeft", leftEColorPoint, (key, oldValue) => leftEColorPoint);
                jointPointDict.AddOrUpdate("HandRight", rightHdColorPoint, (key, oldValue) => rightHdColorPoint);
                jointPointDict.AddOrUpdate("HandLeft", leftHdColorPoint, (key, oldValue) => leftHdColorPoint);
                jointPointDict.AddOrUpdate("Spine", spineColorPoint, (key, oldValue) => spineColorPoint);
                jointPointDict.AddOrUpdate("HipRight", rightHpColorPoint, (key, oldValue) => rightHpColorPoint);
                jointPointDict.AddOrUpdate("HipLeft", leftHpColorPoint, (key, oldValue) => leftHpColorPoint);
                jointPointDict.AddOrUpdate("HipCentre", centreHpColorPoint, (key, oldValue) => centreHpColorPoint);
                jointPointDict.AddOrUpdate("KneeRight", rightKColorPoint, (key, oldValue) => rightKColorPoint);
                jointPointDict.AddOrUpdate("KneeLeft", leftKColorPoint, (key, oldValue) => leftKColorPoint);
                jointPointDict.AddOrUpdate("AnkleRight", rightAColorPoint, (key, oldValue) => rightAColorPoint);
                jointPointDict.AddOrUpdate("AnkleLeft", leftAColorPoint, (key, oldValue) => leftAColorPoint);
            }
        }
Beispiel #45
0
        /// <summary>
        /// Handles the sensor's AllFramesReady event: displays the color frame,
        /// feeds the face trackers, then maps the tracked head position onto the
        /// displayed image and classifies it into one of nine screen regions.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments carrying the color/depth/skeleton frames</param>
        void nui_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            ColorImageFrame cf = null;
            DepthImageFrame df = null;
            SkeletonFrame   sf = null;

            try
            {
                cf = e.OpenColorImageFrame();
                df = e.OpenDepthImageFrame();
                sf = e.OpenSkeletonFrame();

                // All three frames are required; any can be null while a stream is
                // not yet (or no longer) delivering.
                if (cf == null || df == null || sf == null)
                {
                    return;
                }

                // Display the raw color frame.
                byte[] imageBits = new byte[cf.PixelDataLength];
                cf.CopyPixelDataTo(imageBits);

                image2.Source = BitmapSource.Create(cf.Width, cf.Height,
                                                    96, 96, PixelFormats.Bgr32, null,
                                                    imageBits,
                                                    cf.Width * cf.BytesPerPixel);

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != df.Format)
                {
                    this.ResetFaceTracking();
                    this.depthImage       = null;
                    this.depthImageFormat = df.Format;
                }

                if (this.colorImageFormat != cf.Format)
                {
                    this.ResetFaceTracking();
                    this.colorImage       = null;
                    this.colorImageFormat = cf.Format;
                }

                // Create any buffers to store copies of the data we work with.
                if (this.depthImage == null)
                {
                    this.depthImage = new short[df.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[cf.PixelDataLength];
                }

                // Get the skeleton information.
                if (this.skeletonData == null || this.skeletonData.Length != sf.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[sf.SkeletonArrayLength];
                }

                cf.CopyPixelDataTo(this.colorImage);
                df.CopyPixelDataTo(this.depthImage);
                sf.CopySkeletonDataTo(this.skeletonData);

                // Give each (position-)tracked skeleton's face tracker the updated frame.
                foreach (Skeleton skeleton in this.skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                        skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // We want to keep a record of any skeleton, tracked or untracked.
                        if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                        {
                            this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                        }

                        SkeletonFaceTracker skeletonFaceTracker;
                        if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out skeletonFaceTracker))
                        {
                            skeletonFaceTracker.OnFrameReady(nui2, colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);
                            skeletonFaceTracker.LastTrackedFrame = sf.FrameNumber;
                        }
                    }
                }

                // Map each tracked head onto the displayed image and classify it
                // relative to the reference point (m_dbX, m_dbY).
                // NOTE: the depth frame opened above is reused instead of opening a
                // second frame from the same event args.
                foreach (Skeleton sd in skeletonData)
                {
                    if (sd.TrackingState != SkeletonTrackingState.Tracked)
                    {
                        continue;
                    }

                    Joint joint = sd.Joints[JointType.Head];

                    DepthImagePoint depthPoint = df.MapFromSkeletonPoint(joint.Position);

                    // Scale the depth-space head position to the on-screen image size.
                    System.Windows.Point point = new System.Windows.Point(
                        (int)(image2.ActualWidth * depthPoint.X / df.Width),
                        (int)(image2.ActualHeight * depthPoint.Y / df.Height));

                    Canvas.SetLeft(ellipse1, (point.X) - ellipse1.Width);
                    Canvas.SetTop(ellipse1, (point.Y) - ellipse1.Height);

                    App thisApp = App.Current as App;

                    Canvas.SetLeft(rect2, thisApp.m_dbX - rect2.Width);
                    Canvas.SetTop(rect2, thisApp.m_dbY - rect2.Height);

                    double GapX, GapY;
                    GapX = point.X - (thisApp.m_dbX - 2);
                    GapY = point.Y - (thisApp.m_dbY - 2);

                    int siteX = 999, siteY = 999;

                    // Horizontal classification.
                    // BUGFIX: the last branch previously tested GapY, so a head far
                    // to the left never got a horizontal classification and 'site'
                    // became a garbage value.
                    if (GapX < 30 && GapX > -30)
                    {
                        siteX = 1;
                    }
                    else if (GapX >= 30)
                    {
                        siteX = 0;
                    }
                    else if (GapX <= -30)
                    {
                        siteX = 2;
                    }

                    // Vertical classification.
                    if (GapY >= -40)
                    {
                        siteY = 0;
                    }
                    else if (GapY < -40 && GapY > -60)
                    {
                        siteY = 1;
                    }
                    else if (GapY <= -60)
                    {
                        siteY = 2;
                    }

                    // Combine into a 3x3 grid cell (0..8) and show its label.
                    int site = siteX + (siteY * 3);
                    switch (site)
                    {
                        case 0: text2.Text = "좌상"; break;
                        case 1: text2.Text = "상"; break;
                        case 2: text2.Text = "우상"; break;
                        case 3: text2.Text = "좌"; break;
                        case 4: text2.Text = "정"; break;
                        case 5: text2.Text = "우"; break;
                        case 6: text2.Text = "좌하"; break;
                        case 7: text2.Text = "하"; break;
                        case 8: text2.Text = "우하"; break;
                    }

                    thisApp.nowsite = site;
                }
            }
            finally
            {
                // The frames hold Kinect resources; dispose them even on early return.
                // Previously none of the three frames was ever disposed.
                if (cf != null)
                {
                    cf.Dispose();
                }

                if (df != null)
                {
                    df.Dispose();
                }

                if (sf != null)
                {
                    sf.Dispose();
                }
            }
        }
        /// <summary>
        /// Pulls the color, depth and skeleton frames for this event, keeps the
        /// face-tracking buffers in sync with the current stream formats, and feeds
        /// every skeleton that has at least a position to its face tracker.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="allFramesReadyEventArgs">event arguments with the frames</param>
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorFrame = null;
            DepthImageFrame depthFrame = null;
            SkeletonFrame   skelFrame  = null;

            try
            {
                colorFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skelFrame  = allFramesReadyEventArgs.OpenSkeletonFrame();

                bool anyFrameMissing = colorFrame == null || depthFrame == null || skelFrame == null;
                if (anyFrameMissing)
                {
                    return;
                }

                // The FaceTracker cannot cope with a stream format change, so a
                // change forces a full reset plus buffer reallocation.
                if (this.depthImageFormat != depthFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.depthImage       = null;
                    this.depthImageFormat = depthFrame.Format;
                }

                if (this.colorImageFormat != colorFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.colorImage       = null;
                    this.colorImageFormat = colorFrame.Format;
                }

                // Lazily (re)allocate the working buffers.
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorFrame.PixelDataLength];
                }

                bool skeletonBufferStale = this.skeletonData == null ||
                                           this.skeletonData.Length != skelFrame.SkeletonArrayLength;
                if (skeletonBufferStale)
                {
                    this.skeletonData = new Skeleton[skelFrame.SkeletonArrayLength];
                }

                // Snapshot the frame data into our buffers.
                colorFrame.CopyPixelDataTo(this.colorImage);
                depthFrame.CopyPixelDataTo(this.depthImage);
                skelFrame.CopySkeletonDataTo(this.skeletonData);

                // Hand the fresh frame to each skeleton's face tracker, creating
                // trackers for skeletons we have not seen before.
                foreach (Skeleton skeleton in this.skeletonData)
                {
                    bool hasPosition = skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                                       skeleton.TrackingState == SkeletonTrackingState.PositionOnly;
                    if (!hasPosition)
                    {
                        continue;
                    }

                    if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                    {
                        this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                    }

                    SkeletonFaceTracker faceTracker;
                    if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out faceTracker))
                    {
                        faceTracker.OnFrameReady(this.Kinect, colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);
                        faceTracker.LastTrackedFrame = skelFrame.FrameNumber;
                    }
                }

                this.RemoveOldTrackers(skelFrame.FrameNumber);

                this.InvalidateVisual();
            }
            finally
            {
                // Return the frame resources to the sensor as soon as possible.
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }

                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }

                if (skelFrame != null)
                {
                    skelFrame.Dispose();
                }
            }
        }
Beispiel #47
0
        /// <summary>
        /// Per-frame driver for the drag-and-drop interaction: records hand/head
        /// depth positions, moves the system cursor, and dispatches gesture checks.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments carrying the frames</param>
        void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // One-shot initialisation triggered elsewhere via testnumber.
            if (testnumber == 1)
            {
                getSelectedFiles();
                startDrag();
                testnumber = 0;
            }

            drag();

            if (closing)
            {
                return;
            }

            // Need a skeleton to do anything else this frame.
            Skeleton player = GetFirstSkeleton(e);
            if (player == null)
            {
                return;
            }

            // Keep the on-screen cursor glued to the right hand.
            ScalePosition(Cursor, player.Joints[JointType.HandRight]);

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame == null || kinectSensorChooser1.Kinect == null)
                {
                    return;
                }

                // Record left hand, right hand and head as (x, y, depth) triples.
                DepthImagePoint leftHandPoint  = depthFrame.MapFromSkeletonPoint(player.Joints[JointType.HandLeft].Position);
                DepthImagePoint rightHandPoint = depthFrame.MapFromSkeletonPoint(player.Joints[JointType.Head].Position);
                DepthImagePoint headPoint      = depthFrame.MapFromSkeletonPoint(player.Joints[JointType.Head].Position);
                DepthImagePoint rightPoint2    = depthFrame.MapFromSkeletonPoint(player.Joints[JointType.HandRight].Position);

                float[] frameSample = new float[9];
                frameSample[0] = leftHandPoint.X;
                frameSample[1] = leftHandPoint.Y;
                frameSample[2] = leftHandPoint.Depth;
                frameSample[3] = rightPoint2.X;
                frameSample[4] = rightPoint2.Y;
                frameSample[5] = rightPoint2.Depth;
                frameSample[6] = headPoint.X;
                frameSample[7] = headPoint.Y;
                frameSample[8] = headPoint.Depth;
                storedSkeletonValues.Add(frameSample);

                // Drive the real mouse pointer from the smoothed right-hand position.
                System.Windows.Forms.Cursor.Position = new System.Drawing.Point((int)RHPos[0], (int)RHPos[1]);

                if (!actionWait)
                {
                    if (!selectActivated)
                    {
                        CheckSwipe(e);
                    }

                    CheckStatic(e);
                }

                if (lassoFilesDragging)
                {
                    // While dragging a lasso selection, watch the drop zones.
                    CursorInCommitBoxZone();
                    CursorInTrashZone();
                }

                if (selectActivated)
                {
                    mouseLeftDown();
                }
            }

            GetCameraPoint(player, e);

            FollowPointer();
        }
Beispiel #48
0
        /// <summary>
        /// Event handler for the Kinect sensor's AllFramesReady event.
        /// Copies the depth and color frames, maps depth pixels to color space, and
        /// builds an opacity mask ("green screen") so only the tracked player's
        /// pixels remain visible in the displayed color image.
        /// NOTE(review): the summary previously said "DepthFrameReady"; this handler
        /// actually consumes both the depth and the color frame.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // in the middle of shutting down, so nothing to do
            if (null == this.sensor)
            {
                return;
            }

            bool depthReceived = false;
            bool colorReceived = false;

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (null != depthFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    depthFrame.CopyPixelDataTo(this.depthPixels);

                    depthReceived = true;
                }
            }

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (null != colorFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    colorFrame.CopyPixelDataTo(this.colorPixels);

                    colorReceived = true;
                }
            }

            // do our processing outside of the using block
            // so that we return resources to the kinect as soon as possible
            if (true == depthReceived)
            {
                // For every depth pixel, find the matching coordinate in the color image.
                this.sensor.MapDepthFrameToColorFrame(
                    DepthFormat,
                    this.depthPixels,
                    ColorFormat,
                    this.colorCoordinates);

                // Start from a fully transparent mask each frame.
                Array.Clear(this.greenScreenPixelData, 0, this.greenScreenPixelData.Length);

                // loop over each row and column of the depth
                for (int y = 0; y < this.depthHeight; ++y)
                {
                    for (int x = 0; x < this.depthWidth; ++x)
                    {
                        // calculate index into depth array
                        int depthIndex = x + (y * this.depthWidth);

                        short depthPixel = this.depthPixels[depthIndex];

                        // the low bits of each depth value carry the player index
                        int player = depthPixel & DepthImageFrame.PlayerIndexBitmask;

                        // if we're tracking a player for the current pixel, do green screen
                        if (player > 0)
                        {
                            // retrieve the depth to color mapping for the current depth pixel
                            ColorImagePoint colorImagePoint = this.colorCoordinates[depthIndex];

                            // scale color coordinates to depth resolution
                            int colorInDepthX = colorImagePoint.X / this.colorToDepthDivisor;
                            int colorInDepthY = colorImagePoint.Y / this.colorToDepthDivisor;

                            // make sure the depth pixel maps to a valid point in color space
                            // check y >= 0 and y < depthHeight so we don't write outside of the array
                            // check x > 0 instead of >= 0 since to fill gaps we set opaque current pixel plus the one to the left
                            // because of how the sensor works it is more correct to do it this way than to set to the right
                            if (colorInDepthX > 0 && colorInDepthX < this.depthWidth && colorInDepthY >= 0 && colorInDepthY < this.depthHeight)
                            {
                                // calculate index into the green screen pixel array
                                int greenScreenIndex = colorInDepthX + (colorInDepthY * this.depthWidth);

                                // set opaque
                                this.greenScreenPixelData[greenScreenIndex] = opaquePixelValue;

                                // compensate for depth/color not corresponding exactly by setting the pixel
                                // to the left to opaque as well
                                this.greenScreenPixelData[greenScreenIndex - 1] = opaquePixelValue;
                            }
                        }
                    }
                }
            }

            // do our processing outside of the using block
            // so that we return resources to the kinect as soon as possible
            if (true == colorReceived)
            {
                // Write the pixel data into our bitmap
                this.colorBitmap.WritePixels(
                    new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                    this.colorPixels,
                    this.colorBitmap.PixelWidth * sizeof(int),
                    0);

                // Lazily create the mask bitmap and install it as the OpacityMask
                // of the displayed color image (done only once).
                if (this.playerOpacityMaskImage == null)
                {
                    this.playerOpacityMaskImage = new WriteableBitmap(
                        this.depthWidth,
                        this.depthHeight,
                        96,
                        96,
                        PixelFormats.Bgra32,
                        null);

                    MaskedColor.OpacityMask = new ImageBrush {
                        ImageSource = this.playerOpacityMaskImage
                    };
                }

                // Push this frame's player mask into the opacity bitmap.
                this.playerOpacityMaskImage.WritePixels(
                    new Int32Rect(0, 0, this.depthWidth, this.depthHeight),
                    this.greenScreenPixelData,
                    this.depthWidth * ((this.playerOpacityMaskImage.Format.BitsPerPixel + 7) / 8),
                    0);
            }
        }
        /// <summary>
        /// Pulls the color, depth and skeleton frames, keeps the face-tracking
        /// buffers in sync with the current stream formats, and feeds every tracked
        /// skeleton to the single face tracker instance.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="allFramesReadyEventArgs">event arguments with the frames</param>
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.resetFaceTracking();
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.resetFaceTracking();
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with.
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information.
                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                // Update the tracker with the current frame information.
                foreach (Skeleton skeleton in this.skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                        skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // BUGFIX: previously, when trackedSkeleton was null, the
                        // NullReferenceException from the OnFrameReady call was
                        // swallowed, but the unguarded LastTrackedFrame assignment
                        // that followed then crashed with the same exception.
                        if (this.trackedSkeleton != null)
                        {
                            try
                            {
                                this.trackedSkeleton.OnFrameReady(this.Kinect, colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);
                            }
                            catch (NullReferenceException)
                            {
                                // If face tracking is lost mid-frame, swallow the
                                // error to avoid a crash (translated from Italian).
                            }

                            this.trackedSkeleton.LastTrackedFrame = skeletonFrame.FrameNumber;
                        }
                    }
                }

                this.InvalidateVisual();
            }
            finally
            {
                // Return the frame resources to the sensor as soon as possible.
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Beispiel #50
0
        /// <summary>
        /// Green-screen compositing handler: copies the color frame into the display
        /// bitmap, maps the depth frame to color space, extracts only the pixels
        /// belonging to a tracked player into a BGRA buffer, and queues that buffer
        /// for temporal averaging and smoothing.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="args">event arguments with the frames</param>
        private void AllFramesReadyEventHandler(object sender, AllFramesReadyEventArgs args)
        {
            using (ColorImageFrame ciFrame = args.OpenColorImageFrame())
            {
                if (null != ciFrame)
                {
                    ciFrame.CopyPixelDataTo(this.ColorPixels);

                    ColorBitmap.WritePixels(new Int32Rect(0, 0, ColorWidth, ColorHeight),
                                            ColorPixels, ColorWidth * sizeof(int), 0);
                }
            }

            using (DepthImageFrame diFrame = args.OpenDepthImageFrame())
            {
                if (null != diFrame)
                {
                    diFrame.CopyPixelDataTo(this.DepthDatas);
                }
                else
                {
                    // Without depth data there is nothing to segment this frame.
                    return;
                }
            }

            Array.Clear(CIP, 0, CIP.Length);

            // Map every depth pixel to its corresponding color-space coordinate.
            gSensor.MapDepthFrameToColorFrame(DIF, DepthDatas, CIF, CIP);

            byte[] pixels = new byte[gSensor.DepthStream.FramePixelDataLength * sizeof(int)];

            // Fill the player image: copy color pixels wherever a player index is present.
            for (int hIndex = 0; hIndex < DepthHeight; ++hIndex)
            {
                for (int wIndex = 0; wIndex < DepthWidth; ++wIndex)
                {
                    int index = wIndex + hIndex * DepthWidth;

                    if (0 < (DepthDatas[index] & DepthImageFrame.PlayerIndexBitmask)) // Just for Player
                    {
                        ColorImagePoint cip = CIP[index];

                        // scale color coordinates to depth resolution
                        int colorInDepthX = (int)(cip.X / this.Divisor);
                        int colorInDepthY = (int)(cip.Y / this.Divisor);

                        if (colorInDepthX > 0 && colorInDepthX < this.DepthWidth &&
                            colorInDepthY >= 0 && colorInDepthY < this.DepthHeight)
                        {
                            // Byte offsets into the 4-bytes-per-pixel (BGRA) buffers.
                            int playerIndex = (colorInDepthX + (colorInDepthY * this.DepthWidth)) << 2;
                            int colorIndex  = (cip.X + cip.Y * ColorWidth) << 2;

                            pixels[playerIndex]     = ColorPixels[colorIndex];
                            pixels[playerIndex + 1] = ColorPixels[colorIndex + 1];
                            pixels[playerIndex + 2] = ColorPixels[colorIndex + 2];
                            pixels[playerIndex + 3] = ColorPixels[colorIndex + 3];

                            // Also fill the pixel one to the left to compensate for the
                            // depth/color registration gap.
                            // BUGFIX: the offsets were previously decremented by 1 byte,
                            // which shifted the BGRA channels instead of moving one whole
                            // pixel (4 bytes); also guard against underflow at x == 0.
                            if (playerIndex >= 4 && colorIndex >= 4)
                            {
                                playerIndex -= 4;
                                colorIndex  -= 4;

                                pixels[playerIndex]     = ColorPixels[colorIndex];
                                pixels[playerIndex + 1] = ColorPixels[colorIndex + 1];
                                pixels[playerIndex + 2] = ColorPixels[colorIndex + 2];
                                pixels[playerIndex + 3] = ColorPixels[colorIndex + 3];
                            }
                        }

                        HadPlayer = true;
                    }
                }
            }

            lock (gLock)
            {
                // Keep only the most recent frames for temporal averaging.
                PixelsLinkedList.AddLast(pixels);
                Average.ResetLinkedList(PixelsLinkedList, 3);
            }

            // Lazily build the smoothing pipeline once a player has been seen.
            if (null == smooth && HadPlayer)
            {
                Color bg = new Color();
                bg.B = bg.G = bg.R = 0;

                // Bilateral
                smooth = new BilateralFilter(DepthWidth, DepthHeight, PixelFormats.Bgr32);

                // Median
                smooth2 = new GenericMedian(DepthWidth, DepthHeight, PixelFormats.Bgr32, bg, 3);

                median = new AForge.Imaging.Filters.Median(5);

                if (null == globalBWorker)
                {
                    globalBWorker         = new BackgroundWorker();
                    globalBWorker.DoWork += DoWorking;

                    globalBWorker.RunWorkerAsync();
                }
            }
        }
Beispiel #51
0
        /// <summary>
        /// AllFramesReady handler: copies color/depth/skeleton data into cached
        /// buffers and feeds each tracked skeleton's face tracker. Cached buffers
        /// and the face tracker are reset when the depth format changes.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments carrying the three frames</param>
        void KinectFaceNode_AllFrameReady(object sender, AllFramesReadyEventArgs e)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = e.OpenColorImageFrame();
                depthImageFrame = e.OpenDepthImageFrame();
                skeletonFrame   = e.OpenSkeletonFrame();

                // Face tracking needs all three frames; skip this event otherwise.
                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                if (first)
                {
                    first         = false;
                    this.olddepth = depthImageFrame.Format;
                }
                else if (this.olddepth != depthImageFrame.Format)
                {
                    // Depth format changed: buffers and the face tracker are sized
                    // for the old format, so reset everything.
                    this.depthImage = null;

                    if (this.face != null)
                    {
                        this.face.Dispose();
                        this.face = null;
                    }

                    this.trackedSkeletons.Clear();
                    this.olddepth = depthImageFrame.Format;
                }

                // Lazily (re)allocate the buffers the frame data is copied into.
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                if (face == null)
                {
                    face = new FaceTracker(this.runtime.Runtime);
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                foreach (Skeleton skeleton in this.skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                        skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // Keep a face tracker per skeleton TrackingId.
                        if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                        {
                            this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                        }

                        // Give each tracker the updated frame.
                        SkeletonFaceTracker skeletonFaceTracker;
                        if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out skeletonFaceTracker))
                        {
                            skeletonFaceTracker.OnFrameReady(this.runtime.Runtime, colorImageFrame.Format, colorImage, depthImageFrame.Format, depthImage, skeleton);
                            skeletonFaceTracker.LastTrackedFrame = skeletonFrame.FrameNumber;
                        }
                    }
                }

                this.RemoveOldTrackers(skeletonFrame.FrameNumber);

                this.FInvalidate = true;
            }
            finally
            {
                // Dispose the frames on every path: the original only disposed on the
                // success path, leaking all open frames whenever any of the three was
                // null (early return) or an exception was thrown.
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Beispiel #52
0
        /// <summary>
        /// AllFramesReady handler: slices the depth frame to a user-selected distance
        /// band, finds blob contours in it with Emgu CV, logs each blob's skeleton-space
        /// coordinates to the worksheet while recording, and updates the UI images.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments carrying the color and depth frames</param>
        private void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            //TODO Keep the previous frame image as well,
            //Compare both on a background process and save it to the worksheet
            //Convert x&y differences to millimeters according to depth data (distance)
            //and some trigonometry
            BitmapSource depthBmp = null;

            blobCount = 0;

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                {
                    if (depthFrame != null)
                    {
                        blobCount = 0;

                        // Keep only depth pixels between the two slider distances.
                        depthBmp = depthFrame.SliceDepthImage((int)sliderMin.Value, (int)sliderMax.Value);

                        // Convert the sliced depth image to an Emgu CV image, then to
                        // grayscale for contour detection.
                        Image <Bgr, Byte>  openCVImg  = new Image <Bgr, byte>(depthBmp.ToBitmap());
                        Image <Gray, byte> gray_image = openCVImg.Convert <Gray, byte>();

                        // Only log coordinates while a recording session is active.
                        if (running)
                        {
                            wsheet.Cells[1, frameCount + 1].Value = "Frame " + frameCount;
                            frameCount++;
                            using (MemStorage stor = new MemStorage())
                            {
                                //Find contours with no holes try CV_RETR_EXTERNAL to find holes
                                Contour <System.Drawing.Point> contours = gray_image.FindContours(
                                    Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                                    Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_EXTERNAL,
                                    stor);

                                //Conversion of depthPixels to skeletonPoints which contain all three dimensions in meters.
                                //The conversion and copying is assumed to be costly but there are no single pixel to single point conversion I could find.
                                depthFrame.CopyDepthImagePixelDataTo(depthPixels);
                                //mapper.MapDepthFrameToSkeletonFrame(depthFormat, depthPixels, skeletonPoints);

                                // Walk the sibling contour list via HNext; i only counts contours.
                                for (int i = 0; contours != null; contours = contours.HNext)
                                {
                                    i++;

                                    // Area window comes from the min/max size sliders (squared).
                                    if ((contours.Area > Math.Pow(sliderMinSize.Value, 2)) && (contours.Area < Math.Pow(sliderMaxSize.Value, 2)))
                                    {
                                        MCvBox2D box = contours.GetMinAreaRect();
                                        //DrQ RED BOX AROUND BLOB
                                        openCVImg.Draw(box, new Bgr(System.Drawing.Color.Red), 2);
                                        blobCount++;
                                        // Map the blob's center pixel to a skeleton-space point.
                                        int             x = (int)box.center.X;
                                        int             y = (int)box.center.Y;
                                        DepthImagePoint p = new DepthImagePoint();
                                        p.X     = x;
                                        p.Y     = y;
                                        // NOTE(review): the 640 row stride assumes a 640x480 depth
                                        // format — confirm against the stream's actual format.
                                        p.Depth = depthPixels[x + 640 * y].Depth;
                                        SkeletonPoint s = mapper.MapDepthPointToSkeletonPoint(depthFormat, p);

                                        //TODO Conversion from absolute coordinates to relative coordinates

                                        addCoordData(3 * blobCount - 1, frameCount, s.X, s.Y, s.Z);

                                        /*if (KinectSensor.IsKnownPoint(s))
                                         * {
                                         *  addCoordData(3 * blobCount - 1, frameCount, s.X, s.Y, s.Z);
                                         * }*/
                                    }
                                }
                            }
                        }

                        this.outImg.Source = ImageHelpers.ToBitmapSource(openCVImg);
                        txtBlobCount.Text  = blobCount.ToString();

                        // Hand the annotated image to a background worker for further
                        // processing; openCVImg must therefore NOT be disposed here.
                        getNext().RunWorkerAsync(openCVImg);
                    }
                }


                if (colorFrame != null)
                {
                    // Mirror the raw color frame into the UI bitmap.
                    colorFrame.CopyPixelDataTo(this.colorPixels);
                    this.colorBitmap.WritePixels(
                        new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                        this.colorPixels,
                        this.colorBitmap.PixelWidth * sizeof(int),
                        0);
                }
            }
        }
Beispiel #53
0
        /// <summary>
        /// Event handler for the Kinect sensor's AllFramesReady event: feeds depth,
        /// color and skeleton data into the background-removed color stream.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Drop events raised during shutdown or by a previously-selected sensor.
            if (this.sensorChooser == null ||
                this.sensorChooser.Kinect == null ||
                sender != this.sensorChooser.Kinect)
            {
                return;
            }

            try
            {
                using (var depthFrame = e.OpenDepthImageFrame())
                {
                    if (depthFrame != null)
                    {
                        this.backgroundRemovedColorStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
                    }
                }

                using (var colorFrame = e.OpenColorImageFrame())
                {
                    if (colorFrame != null)
                    {
                        this.backgroundRemovedColorStream.ProcessColor(colorFrame.GetRawPixelData(), colorFrame.Timestamp);
                    }
                }

                using (var skeletonFrame = e.OpenSkeletonFrame())
                {
                    if (skeletonFrame != null)
                    {
                        skeletonFrame.CopySkeletonDataTo(this.skeletons);
                        this.backgroundRemovedColorStream.ProcessSkeleton(this.skeletons, skeletonFrame.Timestamp);
                    }
                }

                this.ChooseSkeleton();
            }
            catch (InvalidOperationException)
            {
                // Sensor stopped mid-frame; safe to ignore.
            }
        }
Beispiel #54
0
        /// <summary>
        /// AllFramesReady handler: copies color, depth and skeleton data into pooled
        /// buffers and posts them on the pipeline's ColorImage, DepthImage and
        /// Skeletons emitters. Buffers are reset whenever a frame format changes.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="allFramesReadyEventArgs">event arguments carrying the three frames</param>
        private void Kinect_AllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                // All three frames are needed; skip the event otherwise.
                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with.
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                // TODO look into using the Timestamp on each frame
                var time = pipeline.GetCurrentTime();

                var sharedColorImage = ImagePool.GetOrCreate(colorImageFrame.Width, colorImageFrame.Height, Imaging.PixelFormat.BGRX_32bpp);
                var sharedDepthImage = ImagePool.GetOrCreate(depthImageFrame.Width, depthImageFrame.Height, Imaging.PixelFormat.Gray_16bpp);

                // BGRX is 4 bytes per pixel.
                colorImageFrame.CopyPixelDataTo(sharedColorImage.Resource.ImageData, (colorImageFrame.Width * colorImageFrame.Height * 4));
                this.ColorImage.Post(sharedColorImage, time);

                depthImageFrame.CopyPixelDataTo(sharedDepthImage.Resource.ImageData, depthImageFrame.PixelDataLength);
                this.DepthImage.Post(sharedDepthImage, time);

                skeletonFrame.CopySkeletonDataTo(this.skeletonData);
                this.Skeletons.Post(this.skeletonData.ToList(), time);
            }
            catch
            {
                // Best-effort: drop this frame on any failure (original behavior).
            }
            finally
            {
                // The frames are IDisposable and were never released in the original,
                // which starves the sensor's internal frame pool. Dispose on every path.
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Beispiel #55
0
    /// <summary>
    /// AllFramesReady handler: records the frames, refreshes the color/depth/skeleton
    /// displays and runs hand tracking plus gesture recognition on the result.
    /// </summary>
    void kinectRuntime_AllFrameReady(object sender, AllFramesReadyEventArgs e) {
      // While a replay is in progress, live sensor frames are ignored.
      if (replay != null && !replay.IsFinished) {
        return;
      }

      using (var colorFrame = e.OpenColorImageFrame())
      using (var depthFrame = e.OpenDepthImageFrame())
      using (var skeletonFrame = e.OpenSkeletonFrame()) {
        try {
          if (recorder != null && skeletonFrame != null && depthFrame != null && colorFrame != null) {
            recorder.Record(skeletonFrame, depthFrame, colorFrame);
          }
        } catch (ObjectDisposedException) {
          // Recorder may already have been torn down; ignore (original behavior).
        }

        if (colorFrame != null) {
          colorManager.Update(colorFrame, !displayDebug);
        }

        if (depthFrame != null) {
          depthFrameNumber = depthFrame.FrameNumber;
          depthManager.Update(depthFrame);
        }

        if (skeletonFrame != null) {
          UpdateSkeletonDisplay(skeletonFrame);

          if (handTracker != null && recogEngine != null) {
            var skeleton = SkeletonUtil.FirstTrackedSkeleton(skeletonFrame.GetSkeletons());
            var result = handTracker.Update(depthManager.PixelData, colorManager.PixelData, skeleton);
            var gesture = recogEngine.Update(result);

            lock (inputServer) {
              inputServer.Send(gesture);
            }

            UpdateDisplay(result);
            textGestureEvent.Text = gesture;
            fpsCounter.LogFPS();
          }
        }
      }
    }
Beispiel #56
0
        /// <summary>
        /// Event handler for Kinect sensor's AllFramesReady event: while a user is
        /// tracked, feeds depth, color and skeleton data into the background-removed
        /// color stream.
        /// </summary>
        /// <param name="sender">Object sending the event.</param>
        /// <param name="e">Event arguments.</param>
        private void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Ignore events raised while shutting down or by a previous sensor.
            if (this.sensor == null || this.sensor != sender)
            {
                return;
            }

            try
            {
                // Nothing to process unless a user is currently tracked.
                if (!this.IsTracked)
                {
                    return;
                }

                using (var depthFrame = e.OpenDepthImageFrame())
                {
                    if (depthFrame != null)
                    {
                        // Process depth data for background removal.
                        this.backgroundRemovedColorStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
                    }
                }

                using (var colorFrame = e.OpenColorImageFrame())
                {
                    if (colorFrame != null)
                    {
                        // Process color data for background removal.
                        this.backgroundRemovedColorStream.ProcessColor(colorFrame.GetRawPixelData(), colorFrame.Timestamp);
                    }
                }

                using (var skeletonFrame = e.OpenSkeletonFrame())
                {
                    if (skeletonFrame != null)
                    {
                        // Save skeleton frame data for subsequent processing.
                        this.CopyDataFromSkeletonFrame(skeletonFrame);

                        // Locate the most recent data in which this user was fully tracked.
                        bool isUserPresent = this.UpdateTrackedSkeletonsArray();

                        // With a fully-tracked array available, feed the skeleton data
                        // into background removal as well.
                        if (isUserPresent && this.skeletonsTracked != null)
                        {
                            this.backgroundRemovedColorStream.ProcessSkeleton(this.skeletonsTracked, skeletonFrame.Timestamp);
                        }
                    }
                }
            }
            catch (InvalidOperationException)
            {
                // Sensor stopped mid-frame; safe to ignore.
            }
        }
 /// <summary>
 /// AllFramesReady handler for a multi-sensor setup: copies each sensor's color,
 /// depth and skeleton data into per-sensor buffers, then runs the shared frame
 /// helper. Returns early (skipping the helper) if any frame is unavailable.
 /// </summary>
 /// <param name="sender">the KinectSensor that raised the event</param>
 /// <param name="e">event arguments carrying the three frames</param>
 private void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     if (shuttingDown)
     {
         return;
     }

     KinectSensor sensor = (KinectSensor)sender;

     // Each frame is wrapped in a using so it is disposed on every path; the
     // original leaked frames — setting the local to null does not dispose it,
     // and early returns leaked the frames already opened.
     using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
     {
         if (colorFrame == null) return;
         if (colorPixelData[sensor] == null)
             colorPixelData[sensor] = new byte[colorFrame.PixelDataLength];
         colorFrame.CopyPixelDataTo(colorPixelData[sensor]);
     }

     using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
     {
         if (depthFrame == null) return;
         if (depthPixelData[sensor] == null)
             depthPixelData[sensor] = new short[depthFrame.PixelDataLength];
         depthFrame.CopyPixelDataTo(depthPixelData[sensor]);
     }

     using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
     {
         if (skeletonFrame == null) return;
         skeletonFrame.CopySkeletonDataTo(skeletons[sensor]);
     }

     SensorFrameHelper(sensor, true);
 }
Beispiel #58
0
        /// <summary>
        /// AllFramesReady handler: finds the closest depth pixel within the
        /// (minDist, maxDist) window, marks it on the color image, and every 4th
        /// frame sends aiming commands ("U"/"D"/"L"/"R") over the serial port to
        /// center the target.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments carrying the depth and color frames</param>
        void FramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // using guarantees both frames are disposed; the original never
            // disposed them (and leaked one when only the other was null).
            using (DepthImageFrame imageFrame = e.OpenDepthImageFrame())
            using (ColorImageFrame rgbFrame = e.OpenColorImageFrame())
            {
                if (imageFrame == null || rgbFrame == null)
                {
                    return;
                }

                int   targetPixelDist = int.MaxValue;
                int[] targetPixelPos  = { imageFrame.Width / 2, imageFrame.Height / 2 };

                short[] pixelData = new short[imageFrame.PixelDataLength];
                imageFrame.CopyPixelDataTo(pixelData);

                // Scan all rows in parallel for the nearest pixel inside the distance
                // window. Each row finds its own minimum and only takes the lock to
                // merge, fixing the unsynchronized shared writes of the original.
                int    width = imageFrame.Width; // was hard-coded to 640 for indexing
                object sync  = new object();
                Parallel.For(0, imageFrame.Height, new ParallelOptions {
                    MaxDegreeOfParallelism = 16
                }, y =>
                {
                    int rowBestDist = int.MaxValue;
                    int rowBestX    = -1;

                    for (int x = 0; x < width; x++)
                    {
                        // Low 3 bits of a depth sample hold the player index;
                        // shifting right by 3 yields the distance in millimeters.
                        int dist = (ushort)(pixelData[x + y * width] >> 3);
                        if (dist < maxDist && dist > minDist && dist < rowBestDist)
                        {
                            rowBestDist = dist;
                            rowBestX    = x;
                        }
                    }

                    if (rowBestX >= 0)
                    {
                        lock (sync)
                        {
                            if (rowBestDist < targetPixelDist)
                            {
                                targetPixelDist   = rowBestDist;
                                targetPixelPos[0] = rowBestX;
                                targetPixelPos[1] = y;
                            }
                        }
                    }
                });

                // Draw a red dot on the tracked pixel over the color image.
                pictureBox1.Image = ImageToBitmap(rgbFrame);
                using (Graphics drawStats = Graphics.FromImage(pictureBox1.Image))
                {
                    drawStats.FillEllipse(Brushes.Red, targetPixelPos[0], targetPixelPos[1], 10, 10);
                }

                // Only issue movement commands every 4th frame.
                frameCount++;
                if (frameCount >= 4)
                {
                    // Drain any stale responses from the controller.
                    while (port.BytesToRead > 0)
                    {
                        port.ReadByte();
                    }

                    frameCount = 0;

                    // A target position still equal to the exact image center means
                    // no pixel fell inside the distance window.
                    if (targetPixelPos[0] != imageFrame.Width / 2 && targetPixelPos[1] != imageFrame.Height / 2)
                    {
                        numberOfTimesDetectedSomething++;
                    }
                    else
                    {
                        numberOfTimesDetectedNothing++;
                    }

                    // Require a few consecutive detections before moving.
                    if (numberOfTimesDetectedSomething >= 3)
                    {
                        if (numberOfTimesDetectedNothing > 3)
                        {
                            numberOfTimesDetectedNothing = 0;
                        }

                        // Vertical correction.
                        if (targetPixelPos[1] < (imageFrame.Height / 2) - threshold)
                        {
                            yawLabel.Text = "Up";
                            port.WriteLine("U");
                            port.ReadLine();
                        }
                        else if (targetPixelPos[1] > (imageFrame.Height / 2) + threshold)
                        {
                            yawLabel.Text = "Down";
                            port.WriteLine("D");
                            port.ReadLine();
                        }
                        else
                        {
                            yawLabel.Text = "Target yaw";
                        }

                        // Horizontal correction (note: screen-left maps to command "R",
                        // screen-right to "L", matching the original wiring).
                        if (targetPixelPos[0] < (imageFrame.Width / 2) - threshold)
                        {
                            pitchLabel.Text = "Left";
                            port.WriteLine("R");
                            port.ReadLine();
                        }
                        else if (targetPixelPos[0] > (imageFrame.Width / 2) + threshold)
                        {
                            pitchLabel.Text = "Right";
                            port.WriteLine("L");
                            port.ReadLine();
                        }
                        else
                        {
                            pitchLabel.Text = "Target pitch";
                        }
                    }
                }
            }
        }
Beispiel #59
0
        void sensor_allFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            if (windowClosing)
            {
                return;
            }
            int value;
            if (int.TryParse(this.textBox_init.Text, out value))
            {
                initFrames = value;
            }

            if (fpsEnd == 1)
            {
                //FPS Suggestion. Bei niedrigen Frameraten werden empfangene Frames übersprungen (nicht angezeigt)
                Int16 fps = Convert.ToInt16(this.dropDown_fps.Text);
                switch (fps)
                {
                    case 30:
                        fpsEnd = 1;
                        break;
                    case 15:
                        fpsEnd = 2;
                        break;
                    case 10:
                        fpsEnd = 3;
                        break;
                    case 5:
                        fpsEnd = 6;
                        break;
                    case 1:
                        fpsEnd = 30;
                        break;
                }

                using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                {
                    if (colorFrame != null)
                    {
                        // Kinect Color Frame to Bitmap
                        tempColorFrame = ColorImageFrameToBitmap(colorFrame);
                        this.pictureBox_colorPic.BackgroundImage = tempColorFrame; // this.pictureBox_colorPic.Image = new Bitmap(tempColorFrame, this.pictureBox_colorPic.Width, this.pictureBox_colorPic.Height);

                        // Too slow ifnot scaled down by 10x
                        //this.pictureBox_colorPic.BackgroundImage = BitmapManipulator.ConvertImageToHeatmapBitMap(BitmapManipulator.ScaleBitmap(tempColorFrame, 0.1, 0.1, System.Drawing.Drawing2D.InterpolationMode.Default), Heatmap, false);
                    }
                }

                using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                {
                    if (depthFrame != null)
                    {
                        // Kinect Depth Frame to Bitmap
                        Bitmap tempDepthFrame = DepthImageFrameToBitmap(depthFrame);
                        this.pictureBox_depthPic.BackgroundImage = tempDepthFrame; // this.pictureBox_depthPic.Image = new Bitmap(tempDepthFrame, this.pictureBox_depthPic.Width, this.pictureBox_depthPic.Height);

                        objectsFound = 0;

                        depthBmp = ImageHelpers.SliceDepthImage(depthFrame, (int)trackBarDepthMinDistance.Value, (int)trackBarDepthMaxDistance.Value);

                        Image<Bgr, Byte> openCVImg = new Image<Bgr, byte>(depthBmp);
                        Image<Gray, byte> gray_image = openCVImg.Convert<Gray, byte>();

                        using (MemStorage stor = new MemStorage())
                        {
                            //Find contours with no holes try CV_RETR_EXTERNAL to find holes
                            Contour<System.Drawing.Point> contours = gray_image.FindContours(
                             Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                             Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_EXTERNAL,
                             stor);

                            for (int i = 0; contours != null; contours = contours.HNext)
                            {
                                i++;

                                if ((contours.Area > Math.Pow(trackBarObjectMinSize.Value, 2)) && (contours.Area < Math.Pow(trackBarObjectMaxSize.Value, 2)))
                                {
                                    MCvBox2D box = contours.GetMinAreaRect();
                                    openCVImg.Draw(box, new Bgr(System.Drawing.Color.Red), 2);
                                    objectsFound++;
                                }
                            }
                        }

                        pictureBox_depthPicSmoothed.BackgroundImage = openCVImg.Bitmap;
                        richTextBoxObjectFound.Text = objectsFound.ToString();
                        // The Smoothed Image will apply both a filter and a weighted moving average over the
                        // depth data from the Kinect (depending on UI selections)
                        //pictureBox_depthPicSmoothed.BackgroundImage = CreateSmoothImageFromDepthArray(tempDepthFrame); // pictureBox_depthPicSmoothed.Image = CreateSmoothImageFromDepthArray(tempDepthFrame);

                        // Create a pseudo color of the depth map
                        // Too slow for 30 frames per second if not scaled down by 10x
                        //this.pictureBox_depthPicSmoothed.BackgroundImage = BitmapManipulator.ConvertImageToHeatmapBitMap(BitmapManipulator.ScaleBitmap(tempDepthFrame, 0.1, 0.1, System.Drawing.Drawing2D.InterpolationMode.Default), Heatmap, false);

                        // Create a byte arrya of the depth data
                        //ConvertDepthFrameData(sender, depthFrame);

                        DepthImagePixel[] depth = depthFrame.GetRawPixelData();
                        pictureBox_colorPic.BackgroundImage = LightBuzz.Vitruvius.WinForms.DepthExtensions.ToBitmap(depthFrame, PixelFormat.Format32bppRgb, LightBuzz.Vitruvius.WinForms.DepthImageMode.Colors);
                    }
                }

                using (SkeletonFrame skelFrame = e.OpenSkeletonFrame())
                {
                    if (skelFrame != null)
                    {
                        Image tempSkeletonFrame = new Bitmap(this.pictureBox_skeleton.Width, this.pictureBox_skeleton.Height);
                        // make the background black if there is no image
                        this.pictureBox_skeleton.BackColor = Color.Black;
                        this.pictureBox_skeleton.BackgroundImage = null;

                        if (checkBox_colorCam.Checked)
                        {
                            tempSkeletonFrame = new Bitmap(tempColorFrame);
                        }

                        if (this.checkBoxShowSkeleton.Checked)
                        {
                            Skeleton[] skeletons = new Skeleton[skelFrame.SkeletonArrayLength];
                            skelFrame.CopySkeletonDataTo(skeletons);
                            if (skeletons.Length != 0)
                            {
                                foreach (Skeleton skel in skeletons)
                                {
                                    if (skel.TrackingState == SkeletonTrackingState.Tracked)
                                    {
                                        //Zeichne Skelett
                                        DrawSkeletons(tempSkeletonFrame, skel);

                                        if (skel != null)
                                        {
                                            double height = LightBuzz.Vitruvius.SkeletonExtensions.Height(skel);
                                        }

                                        if (NGEFile != null)
                                        {
                                            if (NGEFile.isRecording == true && NGEFile.isInitializing == true)
                                            {
                                                NGEFile.Entry(skel);

                                                if (NGEFile.intializingCounter > initFrames)
                                                {
                                                    NGEFile.startWritingEntry();
                                                }

                                            }

                                            if (NGEFile.isRecording == true && NGEFile.isInitializing == false)
                                            {
                                                NGEFile.Motion(skel);
                                                this.textBox_sensorStatus.Text = "Record";
                                                this.textBox_sensorStatus.BackColor = Color.Green;
                                            }
                                        }
                                    }
                                }
                            }
                            this.pictureBox_skeleton.BackgroundImage = tempSkeletonFrame; // this.pictureBox_skeleton.Image = tempSkeletonFrame;
                            //this.pictureBox_skeleton.Image = new Bitmap(tempSkeletonFrame, this.pictureBox_skeleton.Width, this.pictureBox_skeleton.Height);
                        }
                    }
                }
            }
            else
            {
                fpsEnd -= 1;
            }
            UpdateFps();
        }
Beispiel #60
0
        private void AllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            // Per-frame pipeline: snapshot color/depth/skeleton data into reusable
            // buffers, refresh the on-screen color bitmap, pick a skeleton to follow,
            // and run the face tracker against it.
            ColorImageFrame colorFrame = null;
            DepthImageFrame depthFrame = null;
            SkeletonFrame   skelFrame  = null;

            try
            {
                colorFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skelFrame  = allFramesReadyEventArgs.OpenSkeletonFrame();

                // All three streams must have delivered a frame; otherwise skip this tick.
                if (colorFrame == null || depthFrame == null || skelFrame == null)
                {
                    return;
                }

                // A depth-format change invalidates the face tracker and the depth buffer.
                if (depthFrame.Format != this.depthImageFormat)
                {
                    this.DestroyFaceTracker();
                    this.depthImage       = null;
                    this.depthImageFormat = depthFrame.Format;
                }

                // A color-format change additionally invalidates every color-derived resource.
                if (colorFrame.Format != this.colorImageFormat)
                {
                    this.DestroyFaceTracker();
                    this.colorImage               = null;
                    this.colorImageFormat         = colorFrame.Format;
                    this.colorImageWritableBitmap = null;
                    this.ColorImage.Source        = null;
                    this.theMaterial.Brush        = null;
                }

                // Discard a skeleton buffer whose length no longer matches the stream.
                if (this.skeletonData != null && this.skeletonData.Length != skelFrame.SkeletonArrayLength)
                {
                    this.skeletonData = null;
                }

                // Lazily (re)allocate whichever working buffers are currently missing.
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorFrame.PixelDataLength];
                }

                if (this.colorImageWritableBitmap == null)
                {
                    this.colorImageWritableBitmap = new WriteableBitmap(
                        colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                    this.ColorImage.Source = this.colorImageWritableBitmap;
                    this.theMaterial.Brush = new ImageBrush(this.colorImageWritableBitmap)
                    {
                        ViewportUnits = BrushMappingMode.Absolute
                    };
                }

                if (this.skeletonData == null)
                {
                    this.skeletonData = new Skeleton[skelFrame.SkeletonArrayLength];
                }

                // Copy this event's data into our buffers, then push the color pixels
                // into the bitmap bound to the UI.
                colorFrame.CopyPixelDataTo(this.colorImage);
                depthFrame.CopyPixelDataTo(this.depthImage);
                skelFrame.CopySkeletonDataTo(this.skeletonData);

                int colorStride = colorFrame.Width * Bgr32BytesPerPixel;
                this.colorImageWritableBitmap.WritePixels(
                    new Int32Rect(0, 0, colorFrame.Width, colorFrame.Height),
                    this.colorImage,
                    colorStride,
                    0);

                // Prefer the skeleton we were already following, as long as it is still
                // tracked in some form (PositionOnly may become fully tracked again).
                Skeleton candidate = this.skeletonData.FirstOrDefault(
                    s => s.TrackingId == this.trackingId &&
                         s.TrackingState != SkeletonTrackingState.NotTracked);

                if (candidate == null)
                {
                    // Our previous skeleton is gone; fall back to any fully tracked one.
                    candidate = this.skeletonData.FirstOrDefault(
                        s => s.TrackingState == SkeletonTrackingState.Tracked);

                    if (candidate != null)
                    {
                        // Possibly a different person: clear any per-person tuning the
                        // face tracker has accumulated.
                        if (this.faceTracker != null)
                        {
                            this.faceTracker.ResetTracking();
                        }

                        this.trackingId = candidate.TrackingId;
                    }
                }

                if (candidate == null || candidate.TrackingState != SkeletonTrackingState.Tracked)
                {
                    // NOTE(review): this also resets trackingId while the followed skeleton
                    // is merely PositionOnly, despite the intent stated above — confirm
                    // whether that is deliberate.
                    this.trackingId = -1;
                }
                else
                {
                    if (this.faceTracker == null)
                    {
                        try
                        {
                            this.faceTracker = new FaceTracker(this.Kinect);
                        }
                        catch (InvalidOperationException)
                        {
                            // FaceTracker construction can fail during some shutdown
                            // scenarios; simply skip face tracking in that case.
                            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                            this.faceTracker = null;
                        }
                    }

                    if (this.faceTracker != null)
                    {
                        FaceTrackFrame faceFrame = this.faceTracker.Track(
                            this.colorImageFormat,
                            this.colorImage,
                            this.depthImageFormat,
                            this.depthImage,
                            candidate);

                        if (faceFrame.TrackSuccessful)
                        {
                            // Arm the one-shot countdown the first time a face locks on.
                            if (!visited)
                            {
                                visited = true;
                                aTimer.Interval = 1000;
                                aTimer.Tick    += new EventHandler(aTimer_Tick);
                                aTimer.Start();
                            }

                            // Persist the current depth image, color image and face
                            // model when a save has been requested.
                            if (saveModel)
                            {
                                saveDepthImagebmp(depthFrame);
                                saveColorImage(colorFrame.Width, colorFrame.Height, colorFrame.Width * Bgr32BytesPerPixel);
                                saveFaceModel();
                            }
                        }
                    }
                }
            }
            finally
            {
                // Frames are IDisposable; release them even on early return or exception.
                if (skelFrame != null)
                {
                    skelFrame.Dispose();
                }

                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }

                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
            }
        }