コード例 #1
0
 /// <summary>
 /// Pairs a Kinect color frame with a desktop-capture bitmap so the two
 /// can be carried around together as one unit.
 /// </summary>
 /// <param name="cif">The Kinect color image frame to store.</param>
 /// <param name="df">The desktop bitmap to store.</param>
 public CoupleFrames(ColorImageFrame cif, Bitmap df)
 {
     // The two assignments are independent; just stash both references.
     this.desktopFrame = df;
     this.colorFrame   = cif;
 }
コード例 #2
0
ファイル: KinectFaceNode.cs プロジェクト: vnmone/vvvv-sdk
        /// <summary>
        /// Handles the Kinect AllFramesReady event: copies the color, depth and
        /// skeleton data into reusable buffers, then feeds each tracked (or
        /// position-only) skeleton to its per-skeleton face tracker.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event args giving access to the color/depth/skeleton frames.</param>
        void KinectFaceNode_AllFrameReady(object sender, AllFramesReadyEventArgs e)
        {
            // Lazily create the shared face tracker on the first event.
            if (face == null)
            {
                face = new FaceTracker(this.runtime.Runtime);
            }

            // FIX: the original returned early when any frame was null without
            // disposing the frames it had already opened, leaking Kinect frame
            // resources (and leaking all three on any exception). The using
            // statements guarantee disposal on every exit path.
            using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                // An incomplete frame set is normal; just skip this event.
                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Allocate the copy buffers on demand; the skeleton buffer is
                // re-allocated if the array length the sensor reports changes.
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                foreach (Skeleton skeleton in this.skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                        skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // Keep one face tracker per skeleton we have seen.
                        if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                        {
                            this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                        }

                        // Give each tracker the updated frame.
                        SkeletonFaceTracker skeletonFaceTracker;
                        if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out skeletonFaceTracker))
                        {
                            skeletonFaceTracker.OnFrameReady(this.runtime.Runtime, ColorImageFormat.RgbResolution640x480Fps30, colorImage, DepthImageFormat.Resolution640x480Fps30, depthImage, skeleton);
                            skeletonFaceTracker.LastTrackedFrame = skeletonFrame.FrameNumber;
                        }
                    }
                }

                // Drop trackers for skeletons that have not been seen recently.
                this.RemoveOldTrackers(skeletonFrame.FrameNumber);
            }

            this.FInvalidate = true;
        }
コード例 #3
0
        /// <summary>
        /// Handles the Kinect AllFramesReady event. Pipeline per frame:
        /// 1) copy skeleton data, 2) convert depth to an 8-bit image via a lookup
        /// table and crop per-hand depth ROIs, 3) copy the color frame, pick the
        /// nearest fully-tracked skeleton, crop/filter per-hand color ROIs, run
        /// skin-color segmentation and split the remaining object region into
        /// HSV channels, then 4) display all intermediate images via OpenCV.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event args giving access to the skeleton/depth/color frames.</param>
        private void OnAllFrameReady(object sender, AllFramesReadyEventArgs e)
        {
            // Ignore events that arrive after shutdown has begun.
            if (this.isShutDown)
            {
                return;
            }

            #region Skeleton Data Process Region
            // 1. Process Skeleton Data
            // Grab Skeleton Frame
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null && this.skeletons != null)
                {
                    // Copy skeleton data into the reusable buffer; used later
                    // in the color section to pick the target skeleton.
                    skeletonFrame.CopySkeletonDataTo(skeletons);
                }
            }
            #endregion

            #region Depth Data Process Region
            // 2. Process Depth Data
            // Grab Depth Frame
            short depthValue;
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    // Copy Depth Pixel Data
                    depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);

                    // Convert each depth sample to an 8-bit intensity:
                    // out-of-range depths become 0, in-range depths are mapped
                    // through depthLookupTable (presumably a depth→byte ramp —
                    // table contents not visible here).
                    for (int i = 0; i < this.depthPixels.Length; i++)
                    {
                        // Get Depth Value For This Pixel
                        depthValue = depthPixels[i].Depth;

                        // unsafe: ImageDataPtr is a raw pointer into the IplImage buffer.
                        unsafe
                        {
                            if (depthValue >= this.maxDepth || depthValue <= this.minDepth)
                            {
                                this.depthImage.ImageDataPtr[i] = (byte)0;
                            }
                            else
                            {
                                this.depthImage.ImageDataPtr[i] = this.depthLookupTable[depthValue];
                            }
                        }
                    }

                    // Crop a fixed-size depth ROI around the left hand, if tracked.
                    // leftHand/rightHand are the joints captured during the
                    // previous frame's color pass (see below).
                    if (this.leftHand.TrackingState == JointTrackingState.Tracked)
                    {
                        DepthImagePoint ldHand = this.sensor.CoordinateMapper.MapSkeletonPointToDepthPoint(this.leftHand.Position, DepthImageFormat.Resolution640x480Fps30);

                        int topleft_x = ldHand.X - Configuration.HAND_REGION_WIDTH / 2;
                        int topleft_y = ldHand.Y - Configuration.HAND_REGION_HEIGHT / 2;

                        // Clamp the ROI inside the frame. NOTE(review): these
                        // bounds use ColorStream frame dimensions for depth
                        // coordinates — only valid while both streams run at
                        // 640x480; confirm if formats ever diverge.
                        if (topleft_x < 0)
                        {
                            topleft_x = 0;
                        }
                        if (topleft_x + Configuration.HAND_REGION_WIDTH >= this.sensor.ColorStream.FrameWidth)
                        {
                            topleft_x = this.sensor.ColorStream.FrameWidth - Configuration.HAND_REGION_WIDTH;
                        }
                        if (topleft_y < 0)
                        {
                            topleft_y = 0;
                        }
                        if (topleft_y + Configuration.HAND_REGION_HEIGHT >= this.sensor.ColorStream.FrameHeight)
                        {
                            topleft_y = this.sensor.ColorStream.FrameHeight - Configuration.HAND_REGION_HEIGHT;
                        }

                        CvRect ldHandRect = new CvRect(topleft_x, topleft_y, Configuration.HAND_REGION_WIDTH, Configuration.HAND_REGION_HEIGHT);
                        //Cv.Rectangle( this.depthImage, ldHandRect, new CvScalar( 0, 0, 255 ), 5 ); // Used for Visualization
                        // Copy the ROI out of the full depth image, then restore the ROI.
                        Cv.SetImageROI(this.depthImage, ldHandRect);
                        Cv.Copy(this.depthImage, this.leftHandDepthImage);
                        Cv.ResetImageROI(this.depthImage);

                        // Median-filter the hand depth crop to suppress speckle noise.
                        Cv.Smooth(this.leftHandDepthImage, this.leftHandDepthImage, SmoothType.Median);
                    }
                    // Same ROI crop for the right hand.
                    if (this.rightHand.TrackingState == JointTrackingState.Tracked)
                    {
                        DepthImagePoint rdHand = this.sensor.CoordinateMapper.MapSkeletonPointToDepthPoint(this.rightHand.Position, DepthImageFormat.Resolution640x480Fps30);

                        int topleft_x = rdHand.X - Configuration.HAND_REGION_WIDTH / 2;
                        int topleft_y = rdHand.Y - Configuration.HAND_REGION_HEIGHT / 2;

                        if (topleft_x < 0)
                        {
                            topleft_x = 0;
                        }
                        if (topleft_x + Configuration.HAND_REGION_WIDTH >= this.sensor.ColorStream.FrameWidth)
                        {
                            topleft_x = this.sensor.ColorStream.FrameWidth - Configuration.HAND_REGION_WIDTH;
                        }
                        if (topleft_y < 0)
                        {
                            topleft_y = 0;
                        }
                        if (topleft_y + Configuration.HAND_REGION_HEIGHT >= this.sensor.ColorStream.FrameHeight)
                        {
                            topleft_y = this.sensor.ColorStream.FrameHeight - Configuration.HAND_REGION_HEIGHT;
                        }

                        CvRect rdHandRect = new CvRect(topleft_x, topleft_y, Configuration.HAND_REGION_WIDTH, Configuration.HAND_REGION_HEIGHT);
                        //Cv.Rectangle( this.depthImage, rdHandRect, new CvScalar( 0, 0, 255 ), 5 ); // Used for Visualization
                        Cv.SetImageROI(this.depthImage, rdHandRect);
                        Cv.Copy(this.depthImage, this.rightHandDepthImage);
                        Cv.ResetImageROI(this.depthImage);

                        // Median-filter the hand depth crop to suppress speckle noise.
                        Cv.Smooth(this.rightHandDepthImage, this.rightHandDepthImage, SmoothType.Median);
                    }
                }
            }
            #endregion

            #region Color Data Process Region
            // 3. Process Color Data
            // Grab Color Frame
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    // Copy raw color pixels straight into the IplImage buffer.
                    unsafe
                    {
                        colorFrame.CopyPixelDataTo(this.colorImage.ImageData, colorFrame.PixelDataLength);
                    }

                    // Draw Skeleton Position
                    if (skeletons != null)
                    {
                        // Choose the nearest (smallest Z) fully-tracked skeleton.
                        Skeleton targetSkeleton = null;
                        for (int i = 0; i < skeletons.Length; i++)
                        {
                            // Skip Invalid State
                            if (skeletons[i] == null)
                            {
                                continue;
                            }

                            // Only Fully Tracked Skeleton
                            if (skeletons[i].TrackingState == SkeletonTrackingState.Tracked)
                            {
                                // Set Target Skeleton - If exists Set to nearest.
                                if (targetSkeleton == null)
                                {
                                    targetSkeleton = skeletons[i];
                                }
                                else if (targetSkeleton.Position.Z > skeletons[i].Position.Z)
                                {
                                    targetSkeleton = skeletons[i];
                                }
                            }
                        }

                        if (targetSkeleton != null)
                        {
                            // ----- Left hand -----
                            // Capture the joint (also consumed by the depth pass
                            // on the NEXT frame) and check its tracked status.
                            this.leftHand = targetSkeleton.Joints[JointType.HandLeft];
                            if (this.leftHand.TrackingState == JointTrackingState.Tracked)
                            {
                                this.leftHandFound = true;

                                // Map the joint into color-image coordinates and
                                // center a fixed-size ROI on it.
                                leftHandPoint           = this.sensor.CoordinateMapper.MapSkeletonPointToColorPoint(leftHand.Position, ColorImageFormat.RgbResolution640x480Fps30);
                                this.leftHandPosition.X = leftHandPoint.X;
                                this.leftHandPosition.Y = leftHandPoint.Y;
                                int topleft_x = leftHandPoint.X - Configuration.HAND_REGION_WIDTH / 2;
                                int topleft_y = leftHandPoint.Y - Configuration.HAND_REGION_HEIGHT / 2;

                                // Clamp the ROI inside the color frame.
                                if (topleft_x < 0)
                                {
                                    topleft_x = 0;
                                }
                                if (topleft_x + Configuration.HAND_REGION_WIDTH >= this.sensor.ColorStream.FrameWidth)
                                {
                                    topleft_x = this.sensor.ColorStream.FrameWidth - Configuration.HAND_REGION_WIDTH;
                                }
                                if (topleft_y < 0)
                                {
                                    topleft_y = 0;
                                }
                                if (topleft_y + Configuration.HAND_REGION_HEIGHT >= this.sensor.ColorStream.FrameHeight)
                                {
                                    topleft_y = this.sensor.ColorStream.FrameHeight - Configuration.HAND_REGION_HEIGHT;
                                }

                                // Crop the hand region out of the color image.
                                leftHandRect = new CvRect(topleft_x, topleft_y, Configuration.HAND_REGION_WIDTH, Configuration.HAND_REGION_HEIGHT);
                                Cv.Rectangle(this.colorImage, this.leftHandRect, new CvScalar(0, 0, 255), 1);                                     // Used for Visualization
                                Cv.SetImageROI(this.colorImage, this.leftHandRect);
                                // Copy Data
                                Cv.Copy(this.colorImage, this.leftHandImage);
                                // Reset ROI
                                Cv.ResetImageROI(this.colorImage);

                                // Smooth Color Hand Image
                                Cv.Smooth(this.leftHandImage, this.leftHandImage, SmoothType.Median);

                                // Keep only the near-depth band in the hand depth crop.
                                CropNear(this.leftHandDepthImage);

                                // Black out color pixels whose depth says "far".
                                FilterFarObjects(this.leftHandImage, this.leftHandDepthImage);

                                // Segment skin-colored pixels (threshold 0.25),
                                // then clean the mask: median blur + erode + dilate.
                                this.skinDetector.FilterSkinColorRegion(this.leftHandImage, this.leftHandSkinImage, 0.25f);
                                //Cv.Smooth( this.leftHandSkinImage, this.leftHandSkinImage, SmoothType.Median );
                                Cv.Smooth(this.leftHandSkinImage, this.leftHandSkinImage, SmoothType.Median, 5);
                                Cv.Erode(this.leftHandSkinImage, this.leftHandSkinImage);
                                Cv.Dilate(this.leftHandSkinImage, this.leftHandSkinImage);


                                // Held-object mask = near-depth region minus skin region.
                                Cv.Sub(this.leftHandDepthImage, this.leftHandSkinImage, this.leftObjectRemainedImage);
                                Cv.Erode(this.leftObjectRemainedImage, this.leftObjectRemainedImage);
                                Cv.Smooth(this.leftObjectRemainedImage, this.leftObjectRemainedImage, SmoothType.Median);

                                // Keep only color pixels belonging to the object mask.
                                FilterFarObjects(this.leftHandImage, this.leftObjectRemainedImage);

                                // Convert the object crop BGRA→BGR→HSV and split channels.
                                Cv.CvtColor(this.leftHandImage, this.leftObjectHSVImage, ColorConversion.BgraToBgr);
                                Cv.CvtColor(this.leftObjectHSVImage, this.leftObjectHSVImage, ColorConversion.BgrToHsv);
                                Cv.Split(this.leftObjectHSVImage, this.leftObjectHImage, this.leftObjectSImage, this.leftObjectVImage, null);
                            }
                            else
                            {
                                this.leftHandFound = false;
                            }

                            // ----- Right hand: same pipeline as the left hand. -----
                            this.rightHand = targetSkeleton.Joints[JointType.HandRight];
                            if (this.rightHand.TrackingState == JointTrackingState.Tracked)
                            {
                                this.rightHandFound = true;

                                rightHandPoint           = this.sensor.CoordinateMapper.MapSkeletonPointToColorPoint(rightHand.Position, ColorImageFormat.RgbResolution640x480Fps30);
                                this.rightHandPosition.X = rightHandPoint.X;
                                this.rightHandPosition.Y = rightHandPoint.Y;

                                int topleft_x = rightHandPoint.X - Configuration.HAND_REGION_WIDTH / 2;
                                int topleft_y = rightHandPoint.Y - Configuration.HAND_REGION_HEIGHT / 2;

                                // Bound Check
                                if (topleft_x < 0)
                                {
                                    topleft_x = 0;
                                }
                                if (topleft_x + Configuration.HAND_REGION_WIDTH >= this.sensor.ColorStream.FrameWidth)
                                {
                                    topleft_x = this.sensor.ColorStream.FrameWidth - Configuration.HAND_REGION_WIDTH;
                                }
                                if (topleft_y < 0)
                                {
                                    topleft_y = 0;
                                }
                                if (topleft_y + Configuration.HAND_REGION_HEIGHT >= this.sensor.ColorStream.FrameHeight)
                                {
                                    topleft_y = this.sensor.ColorStream.FrameHeight - Configuration.HAND_REGION_HEIGHT;
                                }

                                // Set Hand Position
                                rightHandRect = new CvRect(topleft_x, topleft_y, Configuration.HAND_REGION_WIDTH, Configuration.HAND_REGION_HEIGHT);
                                Cv.Rectangle(this.colorImage, this.rightHandRect, new CvScalar(0, 0, 255), 1);                                     // Used for Visualization
                                Cv.SetImageROI(this.colorImage, this.rightHandRect);
                                // Copy Data
                                Cv.Copy(this.colorImage, this.rightHandImage);
                                // Reset ROI
                                Cv.ResetImageROI(this.colorImage);

                                // Smooth Color Hand Image
                                Cv.Smooth(this.rightHandImage, this.rightHandImage, SmoothType.Median);

                                CropNear(this.rightHandDepthImage);

                                // Filter With Depth Image
                                FilterFarObjects(this.rightHandImage, this.rightHandDepthImage);

                                // Detect By Skin Color Model
                                this.skinDetector.FilterSkinColorRegion(this.rightHandImage, this.rightHandSkinImage, 0.25f);
                                // Smooth Color Hand Skin Image
                                //Cv.Smooth( this.rightHandSkinImage, this.rightHandSkinImage, SmoothType.Median );
                                Cv.Smooth(this.rightHandSkinImage, this.rightHandSkinImage, SmoothType.Median, 5);
                                Cv.Erode(this.rightHandSkinImage, this.rightHandSkinImage);
                                Cv.Dilate(this.rightHandSkinImage, this.rightHandSkinImage);

                                // Find Object
                                Cv.Sub(this.rightHandDepthImage, this.rightHandSkinImage, this.rightObjectRemainedImage);
                                Cv.Erode(this.rightObjectRemainedImage, this.rightObjectRemainedImage);
                                Cv.Smooth(this.rightObjectRemainedImage, this.rightObjectRemainedImage, SmoothType.Median);

                                // Filter Objects Only
                                FilterFarObjects(this.rightHandImage, this.rightObjectRemainedImage);

                                Cv.CvtColor(this.rightHandImage, this.rightObjectHSVImage, ColorConversion.BgraToBgr);
                                Cv.CvtColor(this.rightObjectHSVImage, this.rightObjectHSVImage, ColorConversion.BgrToHsv);
                                Cv.Split(this.rightObjectHSVImage, this.rightObjectHImage, this.rightObjectSImage, this.rightObjectVImage, null);
                            }
                            else
                            {
                                this.rightHandFound = false;
                            }
                        }
                    }
                    #endregion

                    #region Image Display Region
                    // 4. Debug visualization of every intermediate image.
                    // Show Depth Image
                    Cv.ShowImage("Depth Image", this.depthImage);
                    Cv.ShowImage("Left Hand Depth Image", this.leftHandDepthImage);
                    Cv.ShowImage("Right Hand Depth Image", this.rightHandDepthImage);

                    // Show Color Image
                    Cv.ShowImage("Color Image", this.colorImage);
                    Cv.ShowImage("Left Hand Image", this.leftHandImage);
                    Cv.ShowImage("Right Hand Image", this.rightHandImage);
                    Cv.ShowImage("Left Hand Skin Image", this.leftHandSkinImage);
                    Cv.ShowImage("Right Hand Skin Image", this.rightHandSkinImage);

                    // Show Object Only Image
                    Cv.ShowImage("Left Hand Object Image", this.leftObjectRemainedImage);
                    Cv.ShowImage("Right Hand Object Image", this.rightObjectRemainedImage);

                    // Show Object HSV Image
                    Cv.ShowImage("Left Hand H Image", this.leftObjectHImage);
                    Cv.ShowImage("Left Hand S Image", this.leftObjectSImage);
                    Cv.ShowImage("Left Hand V Image", this.leftObjectVImage);
                    Cv.ShowImage("Right Hand H Image", this.rightObjectHImage);
                    Cv.ShowImage("Right Hand S Image", this.rightObjectSImage);
                    Cv.ShowImage("Right Hand V Image", this.rightObjectVImage);


                    #endregion
                }
            }
        }
コード例 #4
0
        /// <summary>
        /// Handles AllFramesReady: converts the color frame to a bitmap, draws the
        /// bones and joints of every tracked skeleton on top of it, shows the
        /// result in the "campo" picture box, and starts the fall-detection
        /// thread (isFall) the first time a skeleton is tracked.
        /// </summary>
        /// <param name="sender">The KinectSensor that raised the event.</param>
        /// <param name="e">Event args giving access to the color and skeleton frames.</param>
        void FramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame VFrame = e.OpenColorImageFrame())
            {
                if (VFrame == null)
                {
                    return;
                }

                // Nothing to draw if skeleton tracking is disabled. Checked
                // BEFORE converting the frame so we do not allocate (and, as the
                // original did, leak) a bitmap for nothing.
                if (!((KinectSensor)sender).SkeletonStream.IsEnabled)
                {
                    return;
                }

                // FIX: removed the original's unused pixelS byte[] — it allocated
                // PixelDataLength bytes every frame and was never read.
                Bitmap bmap = ImageToBitmap(VFrame);

                using (SkeletonFrame SFrame = e.OpenSkeletonFrame())
                {
                    if (SFrame == null)
                    {
                        // FIX: the original returned here without disposing the
                        // bitmap it had just created.
                        bmap.Dispose();
                        return;
                    }

                    Skeleton[] Skeletons = new Skeleton[SFrame.SkeletonArrayLength];
                    SFrame.CopySkeletonDataTo(Skeletons);

                    Color cor = System.Drawing.Color.Aquamarine;

                    // FIX: Graphics was never disposed (GDI handle leak, one per frame).
                    using (Graphics g = Graphics.FromImage(bmap))
                    {
                        foreach (Skeleton S in Skeletons)
                        {
                            if (S.TrackingState == SkeletonTrackingState.Tracked)
                            {
                                // Draw torso (head → shoulder center → spine → hip center)
                                DrawBone(JointType.Head, JointType.ShoulderCenter, S, g);
                                DrawBone(JointType.ShoulderCenter, JointType.Spine, S, g);
                                DrawBone(JointType.Spine, JointType.HipCenter, S, g);

                                // Draw left arm
                                DrawBone(JointType.ShoulderCenter, JointType.ShoulderLeft, S, g);
                                DrawBone(JointType.ShoulderLeft, JointType.ElbowLeft, S, g);
                                DrawBone(JointType.ElbowLeft, JointType.WristLeft, S, g);
                                DrawBone(JointType.WristLeft, JointType.HandLeft, S, g);

                                // Draw right arm
                                DrawBone(JointType.ShoulderCenter, JointType.ShoulderRight, S, g);
                                DrawBone(JointType.ShoulderRight, JointType.ElbowRight, S, g);
                                DrawBone(JointType.ElbowRight, JointType.WristRight, S, g);
                                DrawBone(JointType.WristRight, JointType.HandRight, S, g);

                                // Draw right leg (original comment mislabeled this as the left leg)
                                DrawBone(JointType.HipCenter, JointType.HipRight, S, g);
                                DrawBone(JointType.HipRight, JointType.KneeRight, S, g);
                                DrawBone(JointType.KneeRight, JointType.AnkleRight, S, g);
                                DrawBone(JointType.AnkleRight, JointType.FootRight, S, g);

                                // Draw left leg (original comment mislabeled this as the right leg)
                                DrawBone(JointType.HipCenter, JointType.HipLeft, S, g);
                                DrawBone(JointType.HipLeft, JointType.KneeLeft, S, g);
                                DrawBone(JointType.KneeLeft, JointType.AnkleLeft, S, g);
                                DrawBone(JointType.AnkleLeft, JointType.FootLeft, S, g);

                                // Draw joint vertices
                                foreach (Joint j in S.Joints)
                                {
                                    DrawJoint(j.JointType, S, g, cor);
                                }

                                // Publish the latest skeleton and lazily start the
                                // fall-detection thread (only once).
                                skeleton = S;
                                if (t == null)
                                {
                                    t = new Thread(new ThreadStart(isFall));
                                    t.Start();
                                }
                            }
                        }
                    }
                }
                // The picture box takes ownership of the annotated bitmap, so it
                // is intentionally NOT disposed on this path.
                campo.Image = bmap;
            }
        }
コード例 #5
0
        /// <summary>
        /// Depth-frame handler. When a snapshot has been requested (takeSnapshot),
        /// it back-projects the depth pixels around the detected head position into
        /// 3D points, colors them from the matching color frame, writes point-cloud
        /// text files plus a COFF mesh file, and runs the external "rcocone-win.exe"
        /// surface-reconstruction tool on the result.
        /// NOTE(review): relies on several statics not visible here — dp/cp are
        /// presumably the head position in depth/color coordinates, and
        /// HEAD_SIZE / HEAD_SIZE_PIXELS / HORIZONTAL_TAN / VERTICAL_TAN are camera
        /// constants — confirm against the rest of the file.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="imageFrame">Depth frame; disposed at the end of this method.</param>
        /// <param name="cimgframe">Matching color frame used to color the points.</param>
        /// <param name="sLoc">Skeleton-space location (unused in this body).</param>
        static void DepthFrameReady(Object sender, DepthImageFrame imageFrame, ColorImageFrame cimgframe, SkeletonPoint sLoc)
        {
            //Console.WriteLine("Depth");
            if (imageFrame != null && cimgframe != null)
            {
                //form.GetPictureBox1().Image = DepthToBitmap(imageFrame);

                if (takeSnapshot)
                {
                    // Grid of candidate points centered on the head, plus a flat list
                    // of the points that survive the depth test. "block" appears to
                    // guard against re-entry while a snapshot is being processed.
                    Point[,] pointarr = new Point[(int)HEAD_SIZE_PIXELS * 2 + 1, (int)HEAD_SIZE_PIXELS * 2 + 1];
                    block             = true;
                    List <Point>  plist = new List <Point>();
                    Bitmap        bmap  = ImageToBitmap(cimgframe, false);
                    DateTime      d     = DateTime.Now;
                    Color         col;
                    List <String> slist = new List <String>();
                    String        dtm   = d.ToString();
                    String        dtmr  = "data" + dtm + "Reverse.txt";
                    dtm = "data" + dtm + ".txt";
                    Console.WriteLine(dtm);
                    CoordinateMapper cm = new CoordinateMapper(sensor);
                    // Depth array indexed [y, x]; head depth in meters (raw values are mm).
                    snapshotDepthData = GetDepthArray(imageFrame);
                    double faceZ = (double)snapshotDepthData[dp.Y, dp.X] / 1000D;
                    using (System.IO.StreamWriter file = new System.IO.StreamWriter(@"data.txt"))
                        using (System.IO.StreamWriter colorfile = new System.IO.StreamWriter(@"colordata.txt"))
                            using (System.IO.StreamWriter fileReverse = new System.IO.StreamWriter(@"dataReverse.txt"))
                            {
                                for (int x = 0; x < snapshotDepthData.GetLength(1); x++)
                                {
                                    for (int y = 0; y < snapshotDepthData.GetLength(0); y++)
                                    {
                                        // Only consider pixels within the square head window around dp.
                                        if (Math.Abs(x - dp.X) <= HEAD_SIZE_PIXELS && Math.Abs(y - dp.Y) <= HEAD_SIZE_PIXELS)
                                        {
                                            /*dp.X = x;
                                             * dp.Y = y;
                                             * ColorImagePoint c = cm.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30, dp, ColorImageFormat.RgbResolution640x480Fps30);
                                             * c = imageFrame.MapToColorImagePoint(x, y, ColorImageFormat.RgbResolution640x480Fps30);
                                             * Console.WriteLine("dp.X: " + dp.X +"dp.Y: " + dp.Y +"c.X: " + c.X +"c.Y: " + c.Y);
                                             * if(c.X < 640 && c.X >= 0 && c.Y < 480 && c.Y>=0)
                                             * {
                                             *  col = bmap.GetPixel(c.X, c.Y);
                                             * }
                                             * else
                                             * {
                                             *  col = bmap.GetPixel(nearX, nearY);
                                             * }*/

                                            /* if (Math.Abs(x - dp.X) <= 10 && Math.Abs(y - dp.Y) <= 10)
                                             * {
                                             *   col = Color.LightYellow;
                                             * }*/

                                            // Depth→color registration approximated by a constant
                                            // offset (cp - dp) instead of CoordinateMapper (see the
                                            // commented-out attempt above). No bounds check —
                                            // NOTE(review): GetPixel can throw if the offset pushes
                                            // the coordinate outside the bitmap; confirm inputs.
                                            col = bmap.GetPixel(x + (cp.X - dp.X), y + (cp.Y - dp.Y));
                                            float r, g, b;
                                            r = (float)col.R;
                                            r = r / 255;
                                            g = (float)col.G;
                                            g = g / 255;
                                            b = (float)col.B;
                                            b = b / 255;
                                            // Pinhole back-projection: pixel offset from image center,
                                            // scaled by the half-FOV tangents and the depth (mm→m).
                                            // X is negated (mirrored), Y is not.
                                            double newX = -((double)((x - imageFrame.Width / 2) * HORIZONTAL_TAN * snapshotDepthData[y, x])) / (1000 * (double)(imageFrame.Width / 2));
                                            double newY = ((double)((y - imageFrame.Height / 2) * VERTICAL_TAN * snapshotDepthData[y, x]) / (1000 * (double)(imageFrame.Height / 2)));
                                            double newZ = (double)snapshotDepthData[y, x] / 1000D;
                                            // Keep points within half a head-depth of the face plane.
                                            if (Math.Abs(newZ - faceZ) <= HEAD_SIZE / 2)
                                            {
                                                // Color is zeroed before writing, so colordata.txt
                                                // ends up with black points despite the r/g/b work above.
                                                r = 0;
                                                g = 0;
                                                b = 0;
                                                file.WriteLine(newX + " " + newY + " " + newZ);
                                                colorfile.WriteLine(newX + " " + newY + " " + newZ + " " + r.ToString() + " " + g.ToString() + " " + b.ToString() + " " + "1.0");
                                                String s = new String(new char[] { });
                                                s = newX + " " + newY + " " + newZ + " " + r.ToString() + " " + g.ToString() + " " + b.ToString() + " " + "1.0";
                                                slist.Add(s);
                                                // Store the point in the window-local grid; remember its
                                                // grid coordinates and its index into slist for meshing.
                                                pointarr[x + (int)HEAD_SIZE_PIXELS - dp.X, y + (int)HEAD_SIZE_PIXELS - dp.Y] = new Point(newX, newY, newZ, col);
                                                pointarr[x + (int)HEAD_SIZE_PIXELS - dp.X, y + (int)HEAD_SIZE_PIXELS - dp.Y].setArrX(x + (int)HEAD_SIZE_PIXELS - dp.X);
                                                pointarr[x + (int)HEAD_SIZE_PIXELS - dp.X, y + (int)HEAD_SIZE_PIXELS - dp.Y].setArrY(y + (int)HEAD_SIZE_PIXELS - dp.Y);
                                                pointarr[x + (int)HEAD_SIZE_PIXELS - dp.X, y + (int)HEAD_SIZE_PIXELS - dp.Y].setListInd(slist.Count - 1);
                                                plist.Add(pointarr[x + (int)HEAD_SIZE_PIXELS - dp.X, y + (int)HEAD_SIZE_PIXELS - dp.Y]);
                                            }
                                            else
                                            {
                                                // Rejected cell: mark as hole so meshing skips it.
                                                pointarr[x + (int)HEAD_SIZE_PIXELS - dp.X, y + (int)HEAD_SIZE_PIXELS - dp.Y] = null;
                                            }
                                        }
                                    }
                                }
                            }
                    // Build faces from the grid and write the mesh, then emit a
                    // COFF header + vertex list to fulldatacolor.off.
                    int     vert = slist.Count;
                    OffData dat  = new OffData(plist, pointarr);
                    dat.getFaces();
                    dat.writeToFile();

                    using (System.IO.StreamWriter fileoff = new System.IO.StreamWriter(@"fulldatacolor.off"))
                    {
                        fileoff.WriteLine("COFF");
                        fileoff.WriteLine(vert.ToString() + "\t0\t0");
                        int i = 0;
                        for (i = 0; i < vert; ++i)
                        {
                            fileoff.WriteLine(slist.ElementAt(i));
                        }
                    }
                    // Run the external surface-reconstruction tool on data.txt and
                    // block until it finishes.
                    ProcessStartInfo startInfo = new ProcessStartInfo();
                    startInfo.FileName    = @"rcocone-win.exe";
                    startInfo.Arguments   = @"data.txt output";
                    startInfo.WindowStyle = ProcessWindowStyle.Hidden;
                    using (Process proc = Process.Start(startInfo))
                    {
                        proc.WaitForExit();

                        // Retrieve the app's exit code
                        //exitCode = proc.ExitCode;
                    }
                    //Process.Start(startInfo);
                    takeSnapshot = false;
                    block        = false;
                }

                // Caller apparently hands ownership of the depth frame to this
                // method; cimgframe is NOT disposed here — TODO confirm ownership.
                imageFrame.Dispose();
            }
        }
コード例 #6
0
        /// <summary>
        /// Handles the Kinect AllFramesReady event: refreshes every skeleton canvas
        /// with the latest skeleton data, mapping each joint into the 2D space of the
        /// currently selected image type (color or depth) via Get2DPosition.
        /// </summary>
        /// <param name="sender">The KinectSensor that raised the event.</param>
        /// <param name="e">Event arguments providing access to the frames.</param>
        private void KinectAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            KinectSensor sensor = sender as KinectSensor;

            // Clear every canvas up front so stale skeletons vanish when tracking is lost.
            foreach (var skeletonCanvas in skeletonCanvases)
            {
                skeletonCanvas.Skeleton = null;
            }

            // Have we already been "shut down" by the user of this viewer,
            // or has the SkeletonStream been disabled since this event was posted?
            if ((null == KinectSensorManager) ||
                (null == sensor) ||
                (null == sensor.SkeletonStream) ||
                !sensor.SkeletonStream.IsEnabled)
            {
                return;
            }

            bool haveSkeletonData = false;

            // Copy the skeleton data out of the frame and release the frame promptly.
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    // (Re)allocate the buffer only when the array length changes.
                    if ((skeletonData == null) || (skeletonData.Length != skeletonFrame.SkeletonArrayLength))
                    {
                        skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    }

                    skeletonFrame.CopySkeletonDataTo(skeletonData);

                    haveSkeletonData = true;
                }
            }

            if (haveSkeletonData)
            {
                // Determine the format and dimensions of the target image space;
                // Get2DPosition needs them to project skeleton points onto the view.
                ColorImageFormat colorFormat = ColorImageFormat.Undefined;
                int colorWidth  = 0;
                int colorHeight = 0;

                DepthImageFormat depthFormat = DepthImageFormat.Undefined;
                int depthWidth  = 0;
                int depthHeight = 0;

                switch (ImageType)
                {
                case ImageType.Color:
                    // Retrieve the current color format, from the frame if present, and from the sensor if not.
                    using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
                    {
                        if (null != colorImageFrame)
                        {
                            colorFormat = colorImageFrame.Format;
                            colorWidth  = colorImageFrame.Width;
                            colorHeight = colorImageFrame.Height;
                        }
                        else if (null != sensor.ColorStream)
                        {
                            colorFormat = sensor.ColorStream.Format;
                            colorWidth  = sensor.ColorStream.FrameWidth;
                            colorHeight = sensor.ColorStream.FrameHeight;
                        }
                    }

                    break;

                case ImageType.Depth:
                    // Retrieve the current depth format, from the frame if present, and from the sensor if not.
                    using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
                    {
                        if (null != depthImageFrame)
                        {
                            depthFormat = depthImageFrame.Format;
                            depthWidth  = depthImageFrame.Width;
                            depthHeight = depthImageFrame.Height;
                        }
                        else if (null != sensor.DepthStream)
                        {
                            depthFormat = sensor.DepthStream.Format;
                            depthWidth  = sensor.DepthStream.FrameWidth;
                            depthHeight = sensor.DepthStream.FrameHeight;
                        }
                    }

                    break;
                }

                // Pair each skeleton slot with its canvas; extra skeletons beyond the
                // available canvases are ignored.
                for (int i = 0; i < skeletonData.Length && i < skeletonCanvases.Count; i++)
                {
                    var skeleton       = skeletonData[i];
                    var skeletonCanvas = skeletonCanvases[i];
                    var jointMapping   = jointMappings[i];

                    jointMapping.Clear();

                    try
                    {
                        // Transform the data into the correct space
                        // For each joint, we determine the exact X/Y coordinates for the target view
                        foreach (Joint joint in skeleton.Joints)
                        {
                            Point mappedPoint = Get2DPosition(
                                sensor,
                                ImageType,
                                RenderSize,
                                joint.Position,
                                colorFormat,
                                colorWidth,
                                colorHeight,
                                depthFormat,
                                depthWidth,
                                depthHeight);

                            jointMapping[joint.JointType] = new JointMapping
                            {
                                Joint       = joint,
                                MappedPoint = mappedPoint
                            };
                        }
                    }
                    catch (UnauthorizedAccessException)
                    {
                        // Kinect is no longer available.
                        return;
                    }

                    // Look up the center point
                    Point centerPoint = Get2DPosition(
                        sensor,
                        ImageType,
                        RenderSize,
                        skeleton.Position,
                        colorFormat,
                        colorWidth,
                        colorHeight,
                        depthFormat,
                        depthWidth,
                        depthHeight);

                    // Scale the skeleton thickness
                    // 1.0 is the desired size at 640 width
                    double scale = RenderSize.Width / 640;

                    skeletonCanvas.Skeleton      = skeleton;
                    skeletonCanvas.JointMappings = jointMapping;
                    skeletonCanvas.Center        = centerPoint;
                    skeletonCanvas.ScaleFactor   = scale;
                }
            }
        }
コード例 #7
0
        /// <summary>
        /// Event handler for the Kinect sensor's AllFramesReady event (the original
        /// summary incorrectly said "DepthFrameReady"). Copies depth and color data,
        /// builds a player-only opacity mask from the depth player index (the
        /// "green screen" effect), and updates the displayed bitmaps.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // in the middle of shutting down, so nothing to do
            if (null == this.sensor)
            {
                return;
            }

            bool depthReceived = false;
            bool colorReceived = false;

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (null != depthFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);

                    depthReceived = true;
                }
            }

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (null != colorFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    colorFrame.CopyPixelDataTo(this.colorPixels);

                    colorReceived = true;
                }
            }

            // do our processing outside of the using block
            // so that we return resources to the kinect as soon as possible
            if (true == depthReceived)
            {
                // For every depth pixel, find the matching color-image coordinate.
                this.sensor.CoordinateMapper.MapDepthFrameToColorFrame(
                    DepthFormat,
                    this.depthPixels,
                    ColorFormat,
                    this.colorCoordinates);

                // Start from a fully transparent mask each frame.
                Array.Clear(this.greenScreenPixelData, 0, this.greenScreenPixelData.Length);

                // loop over each row and column of the depth
                for (int y = 0; y < this.depthHeight; ++y)
                {
                    for (int x = 0; x < this.depthWidth; ++x)
                    {
                        // calculate index into depth array
                        int depthIndex = x + (y * this.depthWidth);

                        DepthImagePixel depthPixel = this.depthPixels[depthIndex];

                        int player = depthPixel.PlayerIndex;

                        // if we're tracking a player for the current pixel, do green screen
                        if (player > 0)
                        {
                            // retrieve the depth to color mapping for the current depth pixel
                            ColorImagePoint colorImagePoint = this.colorCoordinates[depthIndex];

                            // scale color coordinates to depth resolution
                            int colorInDepthX = colorImagePoint.X / this.colorToDepthDivisor;
                            int colorInDepthY = colorImagePoint.Y / this.colorToDepthDivisor;

                            // make sure the depth pixel maps to a valid point in color space
                            // check y > 0 and y < depthHeight to make sure we don't write outside of the array
                            // check x > 0 instead of >= 0 since to fill gaps we set opaque current pixel plus the one to the left
                            // because of how the sensor works it is more correct to do it this way than to set to the right
                            if (colorInDepthX > 0 && colorInDepthX < this.depthWidth && colorInDepthY >= 0 && colorInDepthY < this.depthHeight)
                            {
                                // calculate index into the green screen pixel array
                                int greenScreenIndex = colorInDepthX + (colorInDepthY * this.depthWidth);

                                // set opaque
                                this.greenScreenPixelData[greenScreenIndex] = opaquePixelValue;

                                // compensate for depth/color not corresponding exactly by setting the pixel
                                // to the left to opaque as well
                                // (safe: colorInDepthX > 0 guarantees greenScreenIndex >= 1)
                                this.greenScreenPixelData[greenScreenIndex - 1] = opaquePixelValue;
                            }
                        }
                    }
                }
            }

            // do our processing outside of the using block
            // so that we return resources to the kinect as soon as possible
            if (true == colorReceived)
            {
                // Write the pixel data into our bitmap
                this.colorBitmap.WritePixels(
                    new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                    this.colorPixels,
                    this.colorBitmap.PixelWidth * sizeof(int),
                    0);

                // Lazily create the opacity-mask bitmap and hook it to the masked
                // color image the first time color data arrives.
                if (this.playerOpacityMaskImage == null)
                {
                    this.playerOpacityMaskImage = new WriteableBitmap(
                        this.depthWidth,
                        this.depthHeight,
                        96,
                        96,
                        PixelFormats.Bgra32,
                        null);

                    MaskedColor.OpacityMask = new ImageBrush {
                        ImageSource = this.playerOpacityMaskImage
                    };
                }

                // Push the freshly computed mask; stride is rounded up to whole bytes.
                this.playerOpacityMaskImage.WritePixels(
                    new Int32Rect(0, 0, this.depthWidth, this.depthHeight),
                    this.greenScreenPixelData,
                    this.depthWidth * ((this.playerOpacityMaskImage.Format.BitsPerPixel + 7) / 8),
                    0);
            }
        }
コード例 #8
0
        /// <summary>
        /// Handles the Kinect AllFramesReady event: copies the skeleton data, picks a
        /// skeleton to track (preferring the previously tracked one), sends a one-time
        /// e-mail alert when tracking first begins, and raises FrameDataUpdated.
        /// The color and depth frames are always disposed before returning.
        /// </summary>
        /// <param name="sender">The object raising the event.</param>
        /// <param name="e">Event arguments giving access to the frames.</param>
        private void Sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            ColorImageFrame colorFrame = null;
            DepthImageFrame depthFrame = null;

            Skeleton[] skeletonData;

            try
            {
                colorFrame = e.OpenColorImageFrame();
                depthFrame = e.OpenDepthImageFrame();

                using (var skeletonFrame = e.OpenSkeletonFrame())
                {
                    // If any stream has no frame this tick, drop the whole event
                    // (the finally block still releases whatever was opened).
                    if (colorFrame == null || depthFrame == null || skeletonFrame == null)
                    {
                        return;
                    }

                    skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    skeletonFrame.CopySkeletonDataTo(skeletonData);
                }

                // Find a skeleton to track.
                // First see if our old one is good.
                // When a skeleton is in PositionOnly tracking state, don't pick a new one
                // as it may become fully tracked again.
                Skeleton skeletonOfInterest = skeletonData.FirstOrDefault(s => s.TrackingId == this.trackedSkeletonId && s.TrackingState != SkeletonTrackingState.NotTracked);

                if (skeletonOfInterest == null)
                {
                    // Old one wasn't around.  Find any skeleton that is being tracked and use it.
                    skeletonOfInterest = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);

                    if (skeletonOfInterest != null)
                    {
                        this.trackedSkeletonId = skeletonOfInterest.TrackingId;

                        if (FirstAlert)
                        {
                            // Construct the alert sender only when it is actually
                            // needed, instead of allocating one on every frame
                            // (~30 fps) as the original code did.
                            EmailAlert alert = new EmailAlert();
                            alert.SendAlert(colorFrame);
                            FirstAlert = false;
                        }
                    }
                }

                // Notify subscribers with the (possibly null) skeleton of interest.
                if (this.FrameDataUpdated != null)
                {
                    this.FrameDataUpdated(this, new FrameData(colorFrame, depthFrame, skeletonOfInterest));
                }
            }
            finally
            {
                // Always return the frames to the runtime, even on early return.
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }

                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
            }
        }
コード例 #9
0
        /// <summary>
        /// Handles the AllFramesReady event: builds a Bgra32 image that contains only
        /// the pixels belonging to tracked players (simple background removal) and
        /// shows it in image1.
        /// Fixes two defects in the original: (1) both frames were Dispose()d and
        /// their .Format properties read afterwards (use-after-dispose); (2) the
        /// "pixel to the left" fill indexed below zero when a player pixel mapped to
        /// the very first color pixel.
        /// </summary>
        /// <param name="sender">The object raising the event.</param>
        /// <param name="e">Event arguments giving access to the frames.</param>
        private void Sensor_AllFrameReady(object sender, AllFramesReadyEventArgs e)
        {
            byte[] colorDataArray;
            ColorImageFormat colorFormat;
            DepthImagePixel[] depthImgPixArray;
            DepthImageFormat depthFormat;

            // Copy the color frame and capture its format, then release the frame
            // immediately so the sensor can reuse it.
            using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }
                colorDataArray = new byte[colorImageFrame.PixelDataLength];
                colorImageFrame.CopyPixelDataTo(colorDataArray);
                colorFormat = colorImageFrame.Format;
            }

            // Same for the depth frame (PixelDataLength = 640*480).
            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            {
                if (depthImageFrame == null)
                {
                    return;
                }
                depthImgPixArray = new DepthImagePixel[depthImageFrame.PixelDataLength];
                depthImageFrame.CopyDepthImagePixelDataTo(depthImgPixArray);
                depthFormat = depthImageFrame.Format;
            }

            byte[]            userColorArray = new byte[_sensor.ColorStream.FramePixelDataLength];
            ColorImagePoint[] colImgPntArray
                = new ColorImagePoint[_sensor.DepthStream.FrameHeight *
                                      _sensor.DepthStream.FrameWidth];

            // Map every depth pixel to its corresponding color-image coordinate,
            // using the formats captured above (not the disposed frames).
            _sensor.CoordinateMapper.MapDepthFrameToColorFrame(depthFormat,
                                                               depthImgPixArray, colorFormat, colImgPntArray);

            int frameWidth     = _sensor.ColorStream.FrameWidth;
            int frameHeight    = _sensor.ColorStream.FrameHeight;
            int bytesPerPixel  = _sensor.ColorStream.FrameBytesPerPixel;

            for (int i = 0; i < depthImgPixArray.Length; i++)
            {
                // Skip pixels that do not belong to a tracked player.
                if (depthImgPixArray[i].PlayerIndex == 0)
                {
                    continue;
                }
                ColorImagePoint colorImagePoint = colImgPntArray[i];
                // Skip depth pixels whose mapping falls outside the color image.
                if (colorImagePoint.X >= frameWidth || colorImagePoint.X < 0 ||
                    colorImagePoint.Y >= frameHeight || colorImagePoint.Y < 0)
                {
                    continue;
                }
                int colorDataIndex =
                    ((colorImagePoint.Y * frameWidth) + colorImagePoint.X)
                    * bytesPerPixel;

                // Copy the player's color pixel and force it fully opaque.
                userColorArray[colorDataIndex]     = colorDataArray[colorDataIndex];
                userColorArray[colorDataIndex + 1] = colorDataArray[colorDataIndex + 1];
                userColorArray[colorDataIndex + 2] = colorDataArray[colorDataIndex + 2];
                userColorArray[colorDataIndex + 3] = 255;

                // Also fill the pixel to the left to hide small depth/color
                // misalignment gaps; guard against underflow at the first pixel.
                if (colorDataIndex >= bytesPerPixel)
                {
                    userColorArray[colorDataIndex - bytesPerPixel]
                        = colorDataArray[colorDataIndex - bytesPerPixel];
                    userColorArray[colorDataIndex - bytesPerPixel + 1]
                        = colorDataArray[colorDataIndex - bytesPerPixel + 1];
                    userColorArray[colorDataIndex - bytesPerPixel + 2]
                        = colorDataArray[colorDataIndex - bytesPerPixel + 2];
                    userColorArray[colorDataIndex - bytesPerPixel + 3] = 255;
                }
            }

            BitmapSource bitmapSource = BitmapSource.Create(
                frameWidth,
                frameHeight,
                96,                  // dpi X
                96,                  // dpi Y
                PixelFormats.Bgra32, // 32-bit BGRA so untouched pixels stay transparent
                null,
                userColorArray,
                frameWidth * bytesPerPixel);

            image1.Source = bitmapSource;
        }
コード例 #10
0
        /// <summary>
        /// Handles the AllFramesReady event of the kinectSensor control: copies the
        /// color, depth and skeleton data, runs the face tracker on the first tracked
        /// skeleton, and animates the on-screen face from the Animation Unit
        /// coefficients. Removes the unused GetTriangles() call present in the
        /// original (a dead allocation per tracked frame).
        /// </summary>
        /// <param name="sender">The source of the event.</param>
        /// <param name="e">The <see cref="Microsoft.Kinect.AllFramesReadyEventArgs"/> instance containing the event data.</param>
        void kinectSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Retrieve each single frame and copy the data; bail out if any stream
            // has no frame available this tick.
            using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }
                colorImageFrame.CopyPixelDataTo(colorPixelData);
            }

            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            {
                if (depthImageFrame == null)
                {
                    return;
                }
                depthImageFrame.CopyPixelDataTo(depthPixelData);
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }
                skeletonFrame.CopySkeletonDataTo(skeletonData);
            }

            // Retrieve the first tracked skeleton if any. Otherwise, do nothing.
            var skeleton = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);

            if (skeleton == null)
            {
                return;
            }

            // Have the face tracker process the new frame data.
            FaceTrackFrame faceFrame = faceTracker.Track(kinectSensor.ColorStream.Format, colorPixelData,
                                                         kinectSensor.DepthStream.Format, depthPixelData,
                                                         skeleton);

            // If a face is tracked, then we can use it.
            if (faceFrame.TrackSuccessful)
            {
                // Retrieve only the Animation Units coeffs.
                var AUCoeff = faceFrame.GetAnimationUnitCoefficients();

                // Jaw: clamp negative values, then open the mouth proportionally.
                var jawLowerer = AUCoeff[AnimationUnit.JawLower];
                jawLowerer = jawLowerer < 0 ? 0 : jawLowerer;
                MouthScaleTransform.ScaleY = jawLowerer * 5 + 0.1;
                MouthScaleTransform.ScaleX = (AUCoeff[AnimationUnit.LipStretcher] + 1);

                // Brows: vertical offset from BrowLower, rotation from BrowRaiser
                // (mirrored for the left brow); canvas follows head roll.
                LeftBrow.Y            = RightBrow.Y = (AUCoeff[AnimationUnit.BrowLower]) * 40;
                RightBrowRotate.Angle = (AUCoeff[AnimationUnit.BrowRaiser] * 20);
                LeftBrowRotate.Angle  = -RightBrowRotate.Angle;
                CanvasRotate.Angle    = -faceFrame.Rotation.Z;
                // CanvasTranslate.X = faceFrame.Translation.X;
                // CanvasTranslate.Y = faceFrame.Translation.Y;

                if (logToFile)
                {
                    writeToFile(filename_txt.Text, faceFrame);
                }

                if (writeToOSC)
                {
                    sendOsc(osc_channel_txt.Text, faceFrame, oscWriter);
                }
            }
        }
コード例 #11
0
        /// <summary>
        /// Event handler for the Kinect sensor's AllFramesReady event (the original
        /// summary incorrectly said "DepthFrameReady"). Copies depth, skeleton and
        /// color data; draws tracked/position-only skeleton markers and drives the
        /// sound-selection and state-machine logic; then builds a player-only opacity
        /// mask from the depth player index and updates the displayed bitmaps.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // in the middle of shutting down, so nothing to do
            if (null == this.sensor)
            {
                return;
            }

            bool depthReceived = false;
            bool colorReceived = false;

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (null != depthFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);

                    depthReceived = true;
                }
            }

            Skeleton[] skeletons = new Skeleton[0];

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    skeletonFrame.CopySkeletonDataTo(skeletons);
                }
            }

            using (DrawingContext dc = this.drawingGroup.Open())
            {
                // Draw a transparent background to set the render size
                dc.DrawRectangle(Brushes.Transparent, null, new Rect(0.0, 0.0, RenderWidth, RenderHeight));

                if (skeletons.Length != 0)
                {
                    foreach (Skeleton skel in skeletons)
                    {
                        if (skel.TrackingState == SkeletonTrackingState.Tracked)
                        {
                            // Fully tracked skeleton: drive the sound-selection logic.
                            this.seleccion_sonido(skel);
                        }
                        else if (skel.TrackingState == SkeletonTrackingState.PositionOnly)
                        {
                            // Position-only skeleton: just mark its body center.
                            dc.DrawEllipse(
                                this.centerPointBrush,
                                null,
                                this.SkeletonPointToScreen(skel.Position),
                                BodyCenterThickness,
                                BodyCenterThickness);
                        }
                    }
                }

                // prevent drawing outside of our render area
                this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, RenderWidth, RenderHeight));
            }

            // Call the function that drives the sequence of states the user must pass through.
            detectar_estado(skeletons);

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (null != colorFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    colorFrame.CopyPixelDataTo(this.colorPixels);

                    colorReceived = true;
                }
            }

            // do our processing outside of the using block
            // so that we return resources to the kinect as soon as possible
            if (true == depthReceived)
            {
                // For every depth pixel, find the matching color-image coordinate.
                this.sensor.CoordinateMapper.MapDepthFrameToColorFrame(
                    DepthFormat,
                    this.depthPixels,
                    ColorFormat,
                    this.colorCoordinates);

                // Start from a fully transparent mask each frame.
                Array.Clear(this.playerPixelData, 0, this.playerPixelData.Length);

                // loop over each row and column of the depth
                for (int y = 0; y < this.depthHeight; ++y)
                {
                    for (int x = 0; x < this.depthWidth; ++x)
                    {
                        // calculate index into depth array
                        int depthIndex = x + (y * this.depthWidth);

                        DepthImagePixel depthPixel = this.depthPixels[depthIndex];

                        int player = depthPixel.PlayerIndex;

                        // if we're tracking a player for the current pixel, set its opacity to full
                        if (player > 0)
                        {
                            // retrieve the depth to color mapping for the current depth pixel
                            ColorImagePoint colorImagePoint = this.colorCoordinates[depthIndex];

                            // scale color coordinates to depth resolution
                            int colorInDepthX = colorImagePoint.X / this.colorToDepthDivisor;
                            int colorInDepthY = colorImagePoint.Y / this.colorToDepthDivisor;

                            // make sure the depth pixel maps to a valid point in color space
                            // check y > 0 and y < depthHeight to make sure we don't write outside of the array
                            // check x > 0 instead of >= 0 since to fill gaps we set opaque current pixel plus the one to the left
                            // because of how the sensor works it is more correct to do it this way than to set to the right
                            if (colorInDepthX > 0 && colorInDepthX < this.depthWidth && colorInDepthY >= 0 && colorInDepthY < this.depthHeight)
                            {
                                // calculate index into the player mask pixel array
                                int playerPixelIndex = colorInDepthX + (colorInDepthY * this.depthWidth);

                                // set opaque
                                this.playerPixelData[playerPixelIndex] = opaquePixelValue;

                                // compensate for depth/color not corresponding exactly by setting the pixel
                                // to the left to opaque as well
                                // (safe: colorInDepthX > 0 guarantees playerPixelIndex >= 1)
                                this.playerPixelData[playerPixelIndex - 1] = opaquePixelValue;
                            }
                        }
                    }
                }
            }

            // do our processing outside of the using block
            // so that we return resources to the kinect as soon as possible
            if (true == colorReceived)
            {
                // Write the pixel data into our bitmap
                this.colorBitmap.WritePixels(
                    new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                    this.colorPixels,
                    this.colorBitmap.PixelWidth * sizeof(int),
                    0);

                // Lazily create the opacity-mask bitmap and hook it to the masked
                // color image the first time color data arrives.
                if (this.playerOpacityMaskImage == null)
                {
                    this.playerOpacityMaskImage = new WriteableBitmap(
                        this.depthWidth,
                        this.depthHeight,
                        96,
                        96,
                        PixelFormats.Bgra32,
                        null);

                    MaskedColor.OpacityMask = new ImageBrush {
                        ImageSource = this.playerOpacityMaskImage
                    };
                }

                // Push the freshly computed mask; stride is rounded up to whole bytes.
                this.playerOpacityMaskImage.WritePixels(
                    new Int32Rect(0, 0, this.depthWidth, this.depthHeight),
                    this.playerPixelData,
                    this.depthWidth * ((this.playerOpacityMaskImage.Format.BitsPerPixel + 7) / 8),
                    0);
            }
        }
コード例 #12
0
ファイル: MainWindow.xaml.cs プロジェクト: leezhm/ChromaKey
        /// <summary>
        /// Handles the sensor's AllFramesReady event: copies the newest color
        /// frame into the display bitmap, maps the depth frame into color space,
        /// and extracts only the player's pixels (green-screen effect) into a
        /// fresh BGRA buffer that is queued for background smoothing.
        /// </summary>
        /// <param name="sender">Event source (the KinectSensor).</param>
        /// <param name="args">Accessor for the synchronized color/depth/skeleton frames.</param>
        private void AllFramesReadyEventHandler(object sender, AllFramesReadyEventArgs args)
        {
            // Copy the color frame into ColorPixels and push it to the on-screen bitmap.
            using (ColorImageFrame ciFrame = args.OpenColorImageFrame())
            {
                if (null != ciFrame)
                {
                    ciFrame.CopyPixelDataTo(this.ColorPixels);

                    // Bgr32 format: 4 bytes per pixel, hence sizeof(int) in the stride.
                    ColorBitmap.WritePixels(new Int32Rect(0, 0, ColorWidth, ColorHeight),
                                            ColorPixels, ColorWidth * sizeof(int), 0);
                }
            }

            // Grab the raw depth data; without it the player mask cannot be built,
            // so bail out for this frame if the depth frame was dropped.
            using (DepthImageFrame diFrame = args.OpenDepthImageFrame())
            {
                if (null != diFrame)
                {
                    diFrame.CopyPixelDataTo(this.DepthDatas);
                }
                else
                {
                    return;
                }
            }

            // Clear
            //Array.Clear(PlayerPixels, 0, PlayerPixels.Length);
            //System.Threading.Tasks.Parallel.For(0, PlayerPixels.Length, index =>
            //    {
            //        PlayerPixels[index] = 200;
            //    });

            // CIP presumably holds one ColorImagePoint per depth pixel -- TODO confirm field declaration.
            Array.Clear(CIP, 0, CIP.Length);

            // Map every depth pixel to its corresponding color-space coordinate.
            // DIF/CIF are assumed to be the configured depth/color image formats -- verify against setup code.
            gSensor.MapDepthFrameToColorFrame(DIF, DepthDatas, CIF, CIP);

            // Fresh BGRA output buffer. Allocated per frame because it is handed
            // off to PixelsLinkedList below and therefore must not be reused.
            byte[] pixels = new byte[gSensor.DepthStream.FramePixelDataLength * sizeof(int)];

            // Fill the Player Image
            for (int hIndex = 0; hIndex < DepthHeight; ++hIndex)
            {
                for (int wIndex = 0; wIndex < DepthWidth; ++wIndex)
                {
                    int index = wIndex + hIndex * DepthWidth;
                    //int player = DepthDatas[index] & DepthImageFrame.PlayerIndexBitmask;

                    // A non-zero player index means this depth pixel belongs to a tracked person.
                    if (0 < (DepthDatas[index] & DepthImageFrame.PlayerIndexBitmask)) // Just for Player
                    {
                        ColorImagePoint cip = CIP[index];

                        // scale color coordinates to depth resolution
                        int colorInDepthX = (int)(cip.X / this.Divisor);
                        int colorInDepthY = (int)(cip.Y / this.Divisor);

                        // X is deliberately strictly > 0 (not >= 0): the code below
                        // also writes to playerIndex - 1, one position to the left.
                        if (colorInDepthX > 0 && colorInDepthX < this.DepthWidth &&
                            colorInDepthY >= 0 && colorInDepthY < this.DepthHeight)
                        {
                            // calculate index into the green screen pixel array
                            int playerIndex = (colorInDepthX + (colorInDepthY * this.DepthWidth)) << 2;
                            int colorIndex  = (cip.X + cip.Y * ColorWidth) << 2;

                            // Copy the 4-byte BGRA color sample for this player pixel.
                            pixels[playerIndex]     = ColorPixels[colorIndex]; //BitConverter.ToInt32(ColorPixels, colorIndex);
                            pixels[playerIndex + 1] = ColorPixels[colorIndex + 1];
                            pixels[playerIndex + 2] = ColorPixels[colorIndex + 2];
                            pixels[playerIndex + 3] = ColorPixels[colorIndex + 3];

                            // Repeat the copy shifted one byte to the left to fill small
                            // holes left by the depth-to-color mapping (same trick as the
                            // SDK GreenScreen sample) -- TODO confirm intent.
                            --playerIndex;
                            --colorIndex;

                            pixels[playerIndex]     = ColorPixels[colorIndex]; //BitConverter.ToInt32(ColorPixels, colorIndex);
                            pixels[playerIndex + 1] = ColorPixels[colorIndex + 1];
                            pixels[playerIndex + 2] = ColorPixels[colorIndex + 2];
                            pixels[playerIndex + 3] = ColorPixels[colorIndex + 3];
                        }

                        // NOTE(review): HadPlayer is set but never cleared in this handler,
                        // so it stays true after a player has been seen once -- confirm intended.
                        HadPlayer = true;
                    }
                    //else
                    //{
                    //    HadPlayer = false;
                    //}
                }
            }

            // Hand the frame to the smoothing pipeline; the linked list is trimmed
            // to the 3 most recent frames for temporal averaging.
            lock (gLock)
            {
                // Enqueue
                //PixelsQueue.Enqueue(pixels);
                //Average.ResetQueue(PixelsQueue, 3);

                PixelsLinkedList.AddLast(pixels);
                Average.ResetLinkedList(PixelsLinkedList, 3);
            }

            // Smoothen: lazily create the filters and start the background worker
            // the first time a player appears.
            if (null == smooth && HadPlayer)
            {
                Color bg = new Color();
                bg.B = bg.G = bg.R = 0;  // black background for filters that require one

                // Gaussian
                //smooth = new GaussianFilter(DepthWidth, DepthHeight, PixelFormats.Bgr32, bg);

                // Bilateral
                smooth = new BilateralFilter(DepthWidth, DepthHeight, PixelFormats.Bgr32);

                // Median
                smooth2 = new GenericMedian(DepthWidth, DepthHeight, PixelFormats.Bgr32, bg, 3);

                median = new AForge.Imaging.Filters.Median(5);

                if (null == globalBWorker)
                {
                    globalBWorker         = new BackgroundWorker();
                    globalBWorker.DoWork += DoWorking;

                    globalBWorker.RunWorkerAsync();
                }
            }

            ////PlayerBitmap.WritePixels(new Int32Rect(0, 0, DepthWidth, DepthHeight),
            ////    PlayerPixels, DepthWidth * ((PlayerBitmap.Format.BitsPerPixel + 7) / 8), 0);
        }
コード例 #13
0
        /// <summary>
        /// Handles the sensor's AllFramesReady event: copies color, depth and
        /// skeleton data into reusable buffers and feeds every tracked (or
        /// position-only) skeleton to the face tracker.
        /// </summary>
        /// <param name="sender">Event source (the KinectSensor).</param>
        /// <param name="allFramesReadyEventArgs">Accessor for the three synchronized frames.</param>
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                // All three frames are required; any of them can be null when the
                // app falls behind the sensor's delivery rate.
                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.resetFaceTracking();
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.resetFaceTracking();
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information; reallocate if the slot count changed.
                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                // Feed every usable skeleton to the face tracker.
                foreach (Skeleton skeleton in this.skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                        skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // BUGFIX: the LastTrackedFrame assignment previously ran outside
                        // the NullReferenceException catch, so a null trackedSkeleton
                        // (the very condition the catch guarded) still crashed here.
                        // Guard both statements with an explicit null check instead.
                        if (this.trackedSkeleton != null)
                        {
                            try
                            {
                                this.trackedSkeleton.OnFrameReady(this.Kinect, colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);
                            }
                            catch (NullReferenceException)
                            {
                                // If face tracking is lost mid-frame we swallow the error
                                // to avoid a crash (deliberate best-effort tracking).
                            }

                            this.trackedSkeleton.LastTrackedFrame = skeletonFrame.FrameNumber;
                        }
                    }
                }

                this.InvalidateVisual();
            }
            finally
            {
                // Frames must always be disposed or the sensor stops delivering new ones.
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
コード例 #14
0
        /// <summary>
        /// Handles the sensor's AllFramesReady event: mirrors the color stream
        /// into a WriteableBitmap (also used as a 3D material brush), selects a
        /// skeleton to follow, and runs the face tracker on it. On the first
        /// successful face track a timer is started; when saveModel is set, the
        /// depth image, color image and face model are written to disk.
        /// </summary>
        /// <param name="sender">Event source (the KinectSensor).</param>
        /// <param name="allFramesReadyEventArgs">Accessor for the three synchronized frames.</param>
        private void AllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                // All three frames are required; any may be null on dropped frames.
                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for changes in any of the data this function is receiving
                // and reset things appropriately.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.DestroyFaceTracker();
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    // A color format change also invalidates the display bitmap and
                    // the material brush built from it.
                    this.DestroyFaceTracker();
                    this.colorImage               = null;
                    this.colorImageFormat         = colorImageFrame.Format;
                    this.colorImageWritableBitmap = null;
                    this.ColorImage.Source        = null;
                    this.theMaterial.Brush        = null;
                }

                // Drop the skeleton buffer if the number of skeleton slots changed.
                if (this.skeletonData != null && this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = null;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.colorImageWritableBitmap == null)
                {
                    this.colorImageWritableBitmap = new WriteableBitmap(
                        colorImageFrame.Width, colorImageFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                    this.ColorImage.Source = this.colorImageWritableBitmap;
                    this.theMaterial.Brush = new ImageBrush(this.colorImageWritableBitmap)
                    {
                        ViewportUnits = BrushMappingMode.Absolute
                    };
                }

                if (this.skeletonData == null)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                // Copy data received in this event to our buffers.
                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);
                this.colorImageWritableBitmap.WritePixels(
                    new Int32Rect(0, 0, colorImageFrame.Width, colorImageFrame.Height),
                    this.colorImage,
                    colorImageFrame.Width * Bgr32BytesPerPixel,
                    0);

                // Find a skeleton to track.
                // First see if our old one is good.
                // When a skeleton is in PositionOnly tracking state, don't pick a new one
                // as it may become fully tracked again.
                Skeleton skeletonOfInterest =
                    this.skeletonData.FirstOrDefault(
                        skeleton =>
                        skeleton.TrackingId == this.trackingId &&
                        skeleton.TrackingState != SkeletonTrackingState.NotTracked);

                if (skeletonOfInterest == null)
                {
                    // Old one wasn't around.  Find any skeleton that is being tracked and use it.
                    skeletonOfInterest =
                        this.skeletonData.FirstOrDefault(
                            skeleton => skeleton.TrackingState == SkeletonTrackingState.Tracked);

                    if (skeletonOfInterest != null)
                    {
                        // This may be a different person so reset the tracker which
                        // could have tuned itself to the previous person.
                        if (this.faceTracker != null)
                        {
                            this.faceTracker.ResetTracking();
                        }

                        this.trackingId = skeletonOfInterest.TrackingId;
                    }
                }

                if (skeletonOfInterest != null && skeletonOfInterest.TrackingState == SkeletonTrackingState.Tracked)
                {
                    // Lazily create the face tracker the first time we need it.
                    if (this.faceTracker == null)
                    {
                        try
                        {
                            this.faceTracker = new FaceTracker(this.Kinect);
                        }
                        catch (InvalidOperationException)
                        {
                            // During some shutdown scenarios the FaceTracker
                            // is unable to be instantiated.  Catch that exception
                            // and don't track a face.
                            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                            this.faceTracker = null;
                        }
                    }

                    if (this.faceTracker != null)
                    {
                        FaceTrackFrame faceTrackFrame = this.faceTracker.Track(
                            this.colorImageFormat,
                            this.colorImage,
                            this.depthImageFormat,
                            this.depthImage,
                            skeletonOfInterest);

                        if (faceTrackFrame.TrackSuccessful)
                        {
                            // On the first successful track, start the 1-second UI timer
                            // (guarded by `visited` so the Tick handler is attached once).
                            if (!visited)
                            {
                                visited = true;
                                //counter.Text = "60 seconds";
                                aTimer.Interval = 1000;
                                aTimer.Tick    += new EventHandler(aTimer_Tick);
                                aTimer.Start();
                            }
                            // Persist the current depth image, color image and face model when requested.
                            if (saveModel)
                            {
                                saveDepthImagebmp(depthImageFrame);
                                saveColorImage(colorImageFrame.Width, colorImageFrame.Height, (colorImageFrame.Width * Bgr32BytesPerPixel));
                                saveFaceModel();
                            }
                        }
                    }
                }
                else
                {
                    // No usable skeleton this frame; forget the previous tracking id.
                    this.trackingId = -1;
                }
            }
            finally
            {
                // Frames must always be disposed or the sensor stops delivering new ones.
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
コード例 #15
0
 /// <summary>
 /// Creates a FrameData snapshot bundling a color frame, a depth frame,
 /// and the skeleton currently being tracked.
 /// </summary>
 public FrameData(ColorImageFrame colorFrame, DepthImageFrame depthFrame, Skeleton trackedSkeleton)
 {
     this.TrackedSkeleton = trackedSkeleton;
     this.DepthFrame      = depthFrame;
     this.ColorFrame      = colorFrame;
 }
コード例 #16
0
ファイル: MainWindow.xaml.cs プロジェクト: dtbinh/cs161grp1
        /// <summary>
        /// Handles the sensor's AllFramesReady event for both playback and recording.
        /// First runs a 3-2-1 countdown (90 event ticks; presumably ~30 fps so each
        /// number shows for about a second -- TODO confirm stream frame rate).
        /// In playback mode, raw color frames are read back from dataStream, the
        /// student's skeleton is compared to the recorded master via DTW and the
        /// joints are colored/positioned accordingly. In record mode, raw color
        /// frames are appended to dataStream and joint positions to a text writer.
        /// </summary>
        /// <param name="sender">Event source (the KinectSensor).</param>
        /// <param name="e">Accessor for the color and skeleton frames.</param>
        void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Countdown phase: update the on-screen digit and skip frame processing.
            if ((playback || recordSet) && countdown < 90)
            {
                if (countdown == 0)
                {
                    Countdown.Text = "3";
                }
                else if (countdown == 30)
                {
                    Countdown.Text = "2";
                }
                else if (countdown == 60)
                {
                    Countdown.Text = "1";
                }
                countdown++;
                return;
            }
            else
            {
                Countdown.Text = "";
                if (playback)
                {
                    changeVisibility(System.Windows.Visibility.Visible);
                }
            }


            if (playback == true)
            {
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                {
                    if (colorFrame == null)
                    {
                        return;
                    }
                    playbackFrameNumber++;
                    // Read one recorded raw frame from the stream (same byte length
                    // as a live frame) and show it instead of the live camera.
                    byte[] pixels = new byte[colorFrame.PixelDataLength];
                    dataStream.Read(pixels, 0, colorFrame.PixelDataLength);
                    int stride = colorFrame.Width * 4;  // Bgr32: 4 bytes per pixel
                    masterView.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, pixels, stride);


                    // Once per 30 playback frames, refresh the textual feedback from
                    // the running correspondence percentage.
                    if (totalCounted != 0)
                    {
                        String playerFeedback;
                        if (playbackFrameNumber % 30 == 0)
                        {
                            playerPercentage = Math.Round((totalCorrespondence / totalCounted) * 100);
                            if (playerPercentage < 20)
                            {
                                playerFeedback = "You're terrible";
                            }
                            else if (playerPercentage < 40)
                            {
                                playerFeedback = "You could do better";
                            }
                            else if (playerPercentage < 60)
                            {
                                playerFeedback = "Getting there";
                            }
                            else if (playerPercentage < 80)
                            {
                                playerFeedback = "Not bad";
                            }
                            else if (playerPercentage < 100)
                            {
                                playerFeedback = "Great job!";
                            }
                            else
                            {
                                playerFeedback = "Are you even real?";
                            }

                            /*
                             * else
                             * {
                             *  playerFeedback = playerPercentage.ToString() + "%";
                             * }
                             */

                            // update display
                            if (!textBlock3.Text.Equals(playerFeedback))
                            {
                                //speech.Speak(playerFeedback);
                                textBlock3.Text = playerFeedback;
                            }
                            textPercentage.Text = playerPercentage.ToString() + "%";
                        }
                    }

                    // End of the recording reached: stop playback, restore the
                    // play/record button images and hide the overlay.
                    if (dataStream.Position == streamLength)
                    {
                        countdown = 0;
                        playback  = false;
                        dataStream.Close();
                        // swap image
                        BitmapImage bitmap = new BitmapImage();
                        bitmap.BeginInit();
                        bitmap.UriSource = new Uri("/VirtualSifu;component/Images/play.png", UriKind.Relative);
                        bitmap.EndInit();
                        image4.Stretch = Stretch.Fill;
                        image4.Source  = bitmap;

                        // undim record button
                        bitmap = new BitmapImage();
                        bitmap.BeginInit();
                        bitmap.UriSource = new Uri("/VirtualSifu;component/Images/record.png", UriKind.Relative);
                        bitmap.EndInit();
                        image2.Stretch = Stretch.Fill;
                        image2.Source  = bitmap;

                        masterView.Source = null;
                        changeVisibility(System.Windows.Visibility.Hidden);
                    }



                    using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
                    {
                        if (skeletonFrame != null)
                        {
                            Skeleton[] data = new Skeleton[skeletonFrame.SkeletonArrayLength];
                            skeletonFrame.CopySkeletonDataTo(data);

                            foreach (Skeleton skeleton in data)
                            {
                                if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
                                {
                                    // Record the student's joint positions for this frame
                                    // into the per-joint rolling buffers (30-slot window).
                                    SkeletonPoint studentPoint;
                                    foreach (String joint in jointsTracked)
                                    {
                                        studentPoint = getJoint(joint, skeleton).Position;
                                        ((ArrayList)studentData.Get(joint)).Insert(playbackFrameNumber % 30, new JointData(studentPoint.X, studentPoint.Y, studentPoint.Z));
                                    }

                                    ArrayList dtwData = new ArrayList();

                                    // Every 30 frames, run DTW per joint and color each
                                    // joint marker by its similarity score.
                                    if (playbackFrameNumber != 0 && playbackFrameNumber % 30 == 0)
                                    {
                                        //run DTW for each joint

                                        dtwData = runDTW3();
                                        Console.Write((double)dtwData[0] + "\n");
                                        colorJoint(ankleRight, (double)dtwData[0]);
                                        colorJoint(ankleLeft, (double)dtwData[1]);
                                        colorJoint(kneeRight, (double)dtwData[2]);
                                        colorJoint(kneeLeft, (double)dtwData[3]);
                                        colorJoint(hipRight, (double)dtwData[4]);
                                        colorJoint(hipLeft, (double)dtwData[5]);
                                        colorJoint(shoulderRight, (double)dtwData[6]);
                                        colorJoint(shoulderLeft, (double)dtwData[7]);
                                        colorJoint(elbowRight, (double)dtwData[8]);
                                        colorJoint(elbowLeft, (double)dtwData[9]);
                                        colorJoint(wristRight, (double)dtwData[10]);
                                        colorJoint(wristLeft, (double)dtwData[11]);

                                        //colorJoint(ellipse, random.Next(0, 4));
                                        //Probably can do this part like Gina's
                                        //Get a joint list that you want calculated
                                        //perform runDTW on each individual joint
                                        //then do your coloring/drawing for it.
                                    }


                                    // Move each joint marker to the skeleton's current position.
                                    ScalePosition(wristRight, skeleton.Joints[JointType.WristRight]);
                                    ScalePosition(wristLeft, skeleton.Joints[JointType.WristLeft]);
                                    ScalePosition(elbowRight, skeleton.Joints[JointType.ElbowRight]);
                                    ScalePosition(elbowLeft, skeleton.Joints[JointType.ElbowLeft]);
                                    ScalePosition(shoulderRight, skeleton.Joints[JointType.ShoulderRight]);
                                    ScalePosition(shoulderLeft, skeleton.Joints[JointType.ShoulderLeft]);
                                    ScalePosition(ankleRight, skeleton.Joints[JointType.AnkleRight]);
                                    ScalePosition(ankleLeft, skeleton.Joints[JointType.AnkleLeft]);
                                    ScalePosition(kneeRight, skeleton.Joints[JointType.KneeRight]);
                                    ScalePosition(kneeLeft, skeleton.Joints[JointType.KneeLeft]);
                                    ScalePosition(hipRight, skeleton.Joints[JointType.HipRight]);
                                    ScalePosition(hipLeft, skeleton.Joints[JointType.HipLeft]);
                                    GetCameraPoint(skeleton, e);
                                }
                            }
                        }
                    }
                }
            }

            if (recordSet == true)
            {
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                {
                    if (colorFrame == null)
                    {
                        return;
                    }
                    frameNumber++;
                    // Append the raw color frame bytes to the recording stream and
                    // mirror the live image on screen.
                    byte[] pixels = new byte[colorFrame.PixelDataLength];
                    colorFrame.CopyPixelDataTo(pixels);

                    dataStream.Write(pixels, 0, colorFrame.PixelDataLength);

                    int stride = colorFrame.Width * 4;  // Bgr32: 4 bytes per pixel
                    masterView.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, pixels, stride);
                }
                using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
                {
                    if (skeletonFrame != null)
                    {
                        Console.Write(masterLeftWristData.Count);
                        Skeleton[] data = new Skeleton[skeletonFrame.SkeletonArrayLength];
                        skeletonFrame.CopySkeletonDataTo(data);

                        foreach (Skeleton skeleton in data)
                        {
                            if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
                            {
                                SkeletonPoint point = skeleton.Joints[JointType.Head].Position;


                                // Write one "joint: x y z" line per tracked joint,
                                // followed by a blank line separating frames.
                                foreach (String joint in jointsTracked)
                                {
                                    point = getJoint(joint, skeleton).Position;
                                    writer.Write(joint + ": " + point.X + " " + point.Y + " " + point.Z + "\r\n");
                                }

                                writer.Write("\r\n");


                                //Somewhere after all this code has run, we need to finish construction of
                                //our FileStreamReader for Student [and master?]
                            }
                        }
                    }
                }
            }
        }
コード例 #17
0
 /// <summary>
 /// Placeholder conversion from a Kinect color frame to a GDI bitmap.
 /// Not implemented: always yields null regardless of input.
 /// </summary>
 private Bitmap ColorImageFrame2Bitmap(ColorImageFrame image)
 {
     return null;
 }
コード例 #18
0
        /// <summary>
        /// Handles each new color frame: shows the raw image on screen, converts
        /// it to HSV and finds green blobs (H in [40, 80]); the centroid of each
        /// blob larger than 100 px^2 is stored in ObjetoX/ObjetoY.
        /// </summary>
        /// <param name="sender">Event source (the KinectSensor).</param>
        /// <param name="e">Accessor for the color frame.</param>
        private void SensorColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
        {
            // HSV range of the tracked (green) object.
            Hsv lowerLimit = new Hsv(40, 100, 100);
            Hsv upperLimit = new Hsv(80, 255, 255);

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    // Copy the pixel data from the frame to a temporary array for display.
                    byte[] datosColor = new byte[colorFrame.PixelDataLength];
                    colorFrame.CopyPixelDataTo(datosColor);

                    System.Drawing.Bitmap bmp             = Helper.ImageToBitmap(colorFrame);
                    Image <Hsv, Byte>     currentFrameHSV = new Image <Hsv, byte>(bmp);

                    // Threshold to a binary mask of pixels inside the HSV range.
                    // (The previous per-frame grayscale conversion was unused and
                    // has been removed.)
                    Image <Gray, Byte> imageHSVDest = currentFrameHSV.InRange(lowerLimit, upperLimit);
                    //imageHSVDest.Erode(200);

                    // Keep the bounding-box centroid of every sufficiently large blob.
                    VectorOfVectorOfPoint vectorOfPoint = Helper.FindContours(imageHSVDest);
                    for (int i = 0; i < vectorOfPoint.Size; i++)
                    {
                        var contour = vectorOfPoint[i];
                        var area    = CvInvoke.ContourArea(contour);
                        if (area > 100)  // ignore noise contours
                        {
                            System.Drawing.Rectangle rec = CvInvoke.BoundingRectangle(contour);
                            Point p1 = new Point(rec.X, rec.Y);
                            Point p2 = new Point(rec.X + rec.Width, rec.Y + rec.Height);

                            // Centroid of the bounding box.
                            ObjetoX = (p1.X + p2.X) / 2;
                            ObjetoY = (p1.Y + p2.Y) / 2;
                            //rect = new Rect(p1, p2);

                            Console.WriteLine($"x: {ObjetoX} y: {ObjetoY}");
                            //currentFrame.Draw(rec, new Bgr(0, double.MaxValue, 0), 3);
                        }
                    }

                    // Show the raw color frame.
                    colorStream.Source = BitmapSource.Create(
                        colorFrame.Width, colorFrame.Height,
                        96,
                        96,
                        PixelFormats.Bgr32,
                        null,
                        datosColor,
                        colorFrame.Width * colorFrame.BytesPerPixel
                        );
                }
            }
        }
コード例 #19
0
 /// <summary>
 /// Initializes a new instance of the <see cref="ColorImageFrameLuminanceSource"/> class.
 /// Delegates to the two-argument constructor with its flag set to true
 /// (see that overload for the flag's meaning).
 /// </summary>
 /// <param name="bitmap">The Kinect color frame to read luminance data from.</param>
 public ColorImageFrameLuminanceSource(ColorImageFrame bitmap)
     : this(bitmap, true)
 {
 }
コード例 #20
0
        /// <summary>
        /// Handles each new color frame: (re)allocates buffers when the image
        /// format changes, converts raw Bayer formats to RGB32, and writes the
        /// pixels into a reusable WriteableBitmap shown in the UI.
        /// </summary>
        /// <param name="sender">Event source (the KinectSensor).</param>
        /// <param name="e">Accessor for the color frame.</param>
        private void ColorImageReady(object sender, ColorImageFrameReadyEventArgs e)
        {
            using (ColorImageFrame imageFrame = e.OpenColorImageFrame())
            {
                if (imageFrame != null)
                {
                    // We need to detect if the format has changed.
                    bool haveNewFormat = this.lastImageFormat != imageFrame.Format;
                    bool convertToRgb  = false;
                    int  bytesPerPixel = imageFrame.BytesPerPixel;

                    // Raw Bayer frames must be demosaiced; the converted output
                    // is 4 bytes per pixel regardless of the raw size.
                    if (imageFrame.Format == ColorImageFormat.RawBayerResolution640x480Fps30 ||
                        imageFrame.Format == ColorImageFormat.RawBayerResolution1280x960Fps12)
                    {
                        convertToRgb  = true;
                        bytesPerPixel = 4;
                    }

                    // Reallocate the staging buffers only when the format changed.
                    if (haveNewFormat)
                    {
                        if (convertToRgb)
                        {
                            this.rawPixelData = new byte[imageFrame.PixelDataLength];
                            this.pixelData    = new byte[bytesPerPixel * imageFrame.Width * imageFrame.Height];
                        }
                        else
                        {
                            this.pixelData = new byte[imageFrame.PixelDataLength];
                        }
                    }

                    if (convertToRgb)
                    {
                        // Demosaic: raw Bayer bytes in, RGB32 pixels out (into this.pixelData).
                        imageFrame.CopyPixelDataTo(this.rawPixelData);
                        ConvertBayerToRgb32(imageFrame.Width, imageFrame.Height);
                    }
                    else
                    {
                        imageFrame.CopyPixelDataTo(this.pixelData);
                    }

                    // A WriteableBitmap is a WPF construct that enables resetting the Bits of the image.
                    // This is more efficient than creating a new Bitmap every frame.
                    if (haveNewFormat)
                    {
                        // Infrared frames are 16-bit grayscale; everything else displays as Bgr32.
                        PixelFormat format = PixelFormats.Bgr32;
                        if (imageFrame.Format == ColorImageFormat.InfraredResolution640x480Fps30)
                        {
                            format = PixelFormats.Gray16;
                        }

                        kinectColorImage.Visibility = Visibility.Visible;
                        this.outputImage            = new WriteableBitmap(
                            imageFrame.Width,
                            imageFrame.Height,
                            96,  // DpiX
                            96,  // DpiY
                            format,
                            null);

                        this.kinectColorImage.Source = this.outputImage;
                    }

                    this.outputImage.WritePixels(
                        new Int32Rect(0, 0, imageFrame.Width, imageFrame.Height),
                        this.pixelData,
                        imageFrame.Width * bytesPerPixel,
                        0);

                    // Remember the format so the next event can detect changes.
                    this.lastImageFormat = imageFrame.Format;

                    UpdateFrameRate();
                }
            }
        }
コード例 #21
0
        /// <summary>
        /// Runs on every new sensor frame: renders the RGB camera image, then
        /// finds the closest fully tracked skeleton and drives the on-screen
        /// ellipses and the forward/back gesture detection from its joints.
        /// </summary>
        void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // --- RGB camera image ---
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                byte[] pixels = new byte[colorFrame.PixelDataLength];
                colorFrame.CopyPixelDataTo(pixels);

                int stride = colorFrame.Width * colorFrame.BytesPerPixel;
                kinectVideo.Source = BitmapSource.Create(
                    colorFrame.Width,
                    colorFrame.Height,
                    96,
                    96,
                    PixelFormats.Bgr32,
                    null,
                    pixels,
                    stride);
            }

            // --- Skeleton processing ---
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                // Nothing to do without a skeleton frame.
                if (skeletonFrame == null)
                {
                    return;
                }

                // (Re)allocate the skeleton buffer when the array length changes.
                if (skeletons == null ||
                    skeletons.Length != skeletonFrame.SkeletonArrayLength)
                {
                    skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }
                skeletonFrame.CopySkeletonDataTo(skeletons);

                // Pick the tracked skeleton whose head is nearest to the sensor.
                Skeleton closestSkeleton = skeletons
                    .Where(s => s.TrackingState == SkeletonTrackingState.Tracked &&
                                s.Joints[JointType.Head].TrackingState == JointTrackingState.Tracked)
                    .OrderBy(s => s.Joints[JointType.Head].Position.Z)
                    .FirstOrDefault();
                if (closestSkeleton == null)
                {
                    return;
                }

                Joint head      = closestSkeleton.Joints[JointType.Head];
                Joint rightHand = closestSkeleton.Joints[JointType.HandRight];
                Joint leftHand  = closestSkeleton.Joints[JointType.HandLeft];

                // All three joints must be tracked to process gestures reliably.
                bool allTracked = head.TrackingState == JointTrackingState.Tracked &&
                                  rightHand.TrackingState == JointTrackingState.Tracked &&
                                  leftHand.TrackingState == JointTrackingState.Tracked;
                if (!allTracked)
                {
                    return;
                }

                // Move the ellipses to the joint positions.
                SetEllipsePosition(ellipseHead, head, false);
                SetEllipsePosition(ellipseLeftHand, leftHand, isBackGestureActive);
                SetEllipsePosition(ellipseRightHand, rightHand, isForwardGestureActive);

                // Check whether the forward/back gesture toggles.
                ProcessForwardBackGesture(head, rightHand, leftHand);
            }
        }
コード例 #22
0
 /// <summary>
 /// Process data from one Kinect color frame.
 /// The base implementation is intentionally a no-op: derived classes
 /// override this to consume the already-copied color buffer.
 /// </summary>
 /// <param name="colorData">
 /// Kinect color data (pixel bytes already copied out of the frame by the caller).
 /// </param>
 /// <param name="colorFrame">
 /// <see cref="ColorImageFrame"/> from which we obtained color data.
 /// </param>
 public virtual void ProcessColor(byte[] colorData, ColorImageFrame colorFrame)
 {
 }
コード例 #23
0
        /// <summary>
        /// Handles the sensor's AllFramesReady event: copies skeleton, depth and
        /// color data out of the frames and forwards them to DataRecieved.
        /// Reports via ErrorWhileRecivingData when any frame is missing.
        /// </summary>
        private void AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                    using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
                    {
                        if (colorFrame == null || depthFrame == null || skeletonFrame == null)
                        {
                            ErrorWhileRecivingData(skeletonFrame, depthFrame, colorFrame);
                            return;
                        }

                        Skeleton[] skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                        skeletonFrame.CopySkeletonDataTo(skeletons);

                        // Depth pixels arrive row-major (index = y * Width + x);
                        // unpack them into a [x, y] grid.
                        short[,] depths = new short[depthFrame.Width, depthFrame.Height];
                        short[] fromDepthFrame = new short[depthFrame.Width * depthFrame.Height];
                        depthFrame.CopyPixelDataTo(fromDepthFrame);
                        DebuggingTable.LatestCreated["size"] = fromDepthFrame.Length.ToString();
                        for (int i = 0; i < fromDepthFrame.Length; i++)
                        {
                            // BUG FIX: the original indexed with i / Height and
                            // i % Height, which scrambles the grid whenever the
                            // frame is not square (e.g. 640x480).
                            depths[i % depthFrame.Width, i / depthFrame.Width] = fromDepthFrame[i];
                        }

                        byte[] colorPixels = new byte[colorFrame.PixelDataLength];
                        colorFrame.CopyPixelDataTo(colorPixels);

                        DataRecieved(skeletons, depths, colorPixels);
                    }
        }
コード例 #24
0
 /// <summary>
 /// Converts a Kinect <see cref="ColorImageFrame"/> to a GDI+ <see cref="Bitmap"/>,
 /// delegating to the pixel-format overload with 32bpp RGB.
 /// </summary>
 public static Bitmap ToBitmap(this ColorImageFrame image)
 {
     return(image.ToBitmap(PixelFormat.Format32bppRgb));
 }
コード例 #25
0
        /// <summary>
        /// AllFramesReady handler that, when a snapshot was requested (takeSnap),
        /// grabs one color frame, shows it in image2, dumps the raw channel bytes
        /// to a "HHmmss.txt" file and saves an interlaced PNG, then chains the
        /// depth snapshot handler.
        /// </summary>
        private void _sensor_TakeSnapshot(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null || !takeSnap)
                {
                    return;
                }
                takeSnap = false;

                byte[] pixels = new byte[colorFrame.PixelDataLength];
                colorFrame.CopyPixelDataTo(pixels);

                BitmapSource image;
                if (irCam)
                {
                    // Infrared stream: 16-bit grayscale, 2 bytes per pixel.
                    int stride = colorFrame.Width * 2;
                    image = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Gray16, null, pixels, stride);
                }
                else
                {
                    // RGB stream: Bgr32, 4 bytes per pixel.
                    int stride = colorFrame.Width * 4;
                    image = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, pixels, stride);
                }
                image2.Source = image;

                // Show the first pixel's channels for quick inspection.
                int index = 0;
                label_blue.Content  = pixels[index + 0];
                label_green.Content = pixels[index + 1];
                label_red.Content   = pixels[index + 2];
                label_empty.Content = pixels[index + 3];

                label_Width.Content  = colorFrame.Width;
                label_height.Content = colorFrame.Height;

                // File name from the current time with separators stripped
                // (the original built "HH:mm:ss tt" and filtered ':' and ' '
                // from its first eight characters, which is exactly "HHmmss").
                String cfName = string.Format("{0:HHmmss}", DateTime.Now);

                label2.Content = cfName + ".txt";

                // BUG FIX: the writer and stream are now wrapped in using blocks
                // so they are closed even if an I/O error occurs mid-write.
                using (System.IO.StreamWriter file = new System.IO.StreamWriter(cfName + ".txt", true))
                {
                    file.Write(colorFrame.Width.ToString() + "#" + colorFrame.Height.ToString() + "#");
                    for (int i = 0; i < pixels.Length; i++)
                    {
                        // Skip every fourth byte (the unused alpha channel).
                        if (i % 4 == 3)
                        {
                            continue;
                        }
                        string s = pixels[i].ToString();
                        file.Write(s + '$');
                    }
                }

                using (FileStream stream = new FileStream(cfName + ".png", FileMode.Create))
                {
                    PngBitmapEncoder encoder = new PngBitmapEncoder();
                    encoder.Interlace = PngInterlaceOption.On;
                    encoder.Frames.Add(BitmapFrame.Create(image));
                    encoder.Save(stream);
                }
            }

            // BUG FIX: unsubscribe before subscribing so that repeated snapshots
            // do not stack duplicate depth-shot handlers (the original added a
            // new handler after every snapshot).
            _sensor.AllFramesReady -= _sensor_TakeDepthshot;
            _sensor.AllFramesReady += new EventHandler <AllFramesReadyEventArgs>(_sensor_TakeDepthshot);
        }
コード例 #26
0
        /// <summary>
        /// AllFramesReady handler: copies color, depth and skeleton data into
        /// reusable buffers, then feeds every tracked (or position-only) skeleton's
        /// face tracker with the new frame and prunes trackers for skeletons that
        /// have not been seen recently.
        /// </summary>
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                // All three streams are required; skip this event if any is missing.
                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                // (lazily, so a format change above forces reallocation).
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information
                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                // Update the list of trackers and the trackers with the current frame information
                foreach (Skeleton skeleton in this.skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                        skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // We want keep a record of any skeleton, tracked or untracked.
                        if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                        {
                            this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                        }

                        // Give each tracker the upated frame.
                        SkeletonFaceTracker skeletonFaceTracker;
                        if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out skeletonFaceTracker))
                        {
                            skeletonFaceTracker.OnFrameReady(this.Kinect, colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);
                            skeletonFaceTracker.LastTrackedFrame = skeletonFrame.FrameNumber;
                        }
                    }
                }

                this.RemoveOldTrackers(skeletonFrame.FrameNumber);

                this.InvalidateVisual();
            }
            finally
            {
                // The frames are IDisposable and hold sensor buffers; always
                // release them, even on early return or exception.
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
コード例 #27
0
        /// <summary>
        /// AllFramesReady handler: copies color, depth and skeleton data into
        /// reusable buffers, resets face tracking on a stream format change,
        /// then feeds every tracked skeleton's face tracker with the new frame
        /// and prunes stale trackers.
        /// </summary>
        public void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            // using blocks give the same guaranteed disposal as the original
            // try/finally (Dispose is skipped automatically for null frames).
            using (ColorImageFrame colorFrame = allFramesReadyEventArgs.OpenColorImageFrame())
            using (DepthImageFrame depthFrame = allFramesReadyEventArgs.OpenDepthImageFrame())
            using (SkeletonFrame skelFrame = allFramesReadyEventArgs.OpenSkeletonFrame())
            {
                if (colorFrame == null || depthFrame == null || skelFrame == null)
                {
                    return;
                }

                // A stream format change invalidates the face tracker and its buffers.
                if (this.depthImageFormat != depthFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.depthImage       = null;
                    this.depthImageFormat = depthFrame.Format;
                }

                if (this.colorImageFormat != colorFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.colorImage       = null;
                    this.colorImageFormat = colorFrame.Format;
                }

                // (Re)allocate the working buffers lazily.
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skelFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skelFrame.SkeletonArrayLength];
                }

                colorFrame.CopyPixelDataTo(this.colorImage);
                depthFrame.CopyPixelDataTo(this.depthImage);
                skelFrame.CopySkeletonDataTo(this.skeletonData);

                // Feed each tracked (or position-only) skeleton's face tracker.
                foreach (Skeleton skeleton in this.skeletonData)
                {
                    bool usable = skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                                  skeleton.TrackingState == SkeletonTrackingState.PositionOnly;
                    if (!usable)
                    {
                        continue;
                    }

                    // Keep a record of any usable skeleton, tracked or untracked.
                    if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                    {
                        this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                    }

                    SkeletonFaceTracker tracker;
                    if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out tracker))
                    {
                        tracker.OnFrameReady(this.Kinect, colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);
                        tracker.LastTrackedFrame = skelFrame.FrameNumber;
                    }
                }

                this.RemoveOldTrackers(skelFrame.FrameNumber);

                this.InvalidateVisual();
            }
        }
コード例 #28
0
        /// <summary>
        /// AllFramesReady handler for the XNA viewer: uploads the color frame
        /// into a Texture2D (swapping channels for XNA's pixel layout), caches
        /// the latest tracked skeleton, and converts the depth frame for display.
        /// </summary>
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs imageFrames)
        {
            //
            // Color frame
            //
            // BUG FIX: the frame is IDisposable and was never disposed before.
            using (ColorImageFrame colorVideoFrame = imageFrames.OpenColorImageFrame())
            {
                if (colorVideoFrame != null)
                {
                    // Copy the raw pixel data out of the frame.
                    Byte[] pixelData = new Byte[colorVideoFrame.PixelDataLength];
                    colorVideoFrame.CopyPixelDataTo(pixelData);

                    // Swap the first and third channels and force the alpha byte
                    // opaque (the stream's alpha comes in as 0, i.e. transparent).
                    Byte[] bgraPixelData = new Byte[colorVideoFrame.PixelDataLength];
                    for (int i = 0; i < pixelData.Length; i += 4)
                    {
                        bgraPixelData[i]     = pixelData[i + 2];
                        bgraPixelData[i + 1] = pixelData[i + 1];
                        bgraPixelData[i + 2] = pixelData[i];
                        bgraPixelData[i + 3] = (Byte)255;
                    }

                    // Create a texture and assign the realigned pixels.
                    colorVideo = new Texture2D(graphics.GraphicsDevice, colorVideoFrame.Width, colorVideoFrame.Height);
                    colorVideo.SetData(bgraPixelData);
                }
            }

            //
            // Skeleton frame
            //
            using (SkeletonFrame skeletonFrame = imageFrames.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    if ((skeletonData == null) || (this.skeletonData.Length != skeletonFrame.SkeletonArrayLength))
                    {
                        this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    }

                    // Copy the skeleton data to our array.
                    skeletonFrame.CopySkeletonDataTo(this.skeletonData);
                }
            }

            // Remember the last tracked skeleton, if any.
            if (skeletonData != null)
            {
                foreach (Skeleton skel in skeletonData)
                {
                    if (skel.TrackingState == SkeletonTrackingState.Tracked)
                    {
                        skeleton = skel;
                    }
                }
            }

            //
            // Depth frame
            //
            // BUG FIX: also disposed now (was previously leaked).
            using (DepthImageFrame depthVideoFrame = imageFrames.OpenDepthImageFrame())
            {
                if (depthVideoFrame != null)
                {
                    short[] pixelData = new short[depthVideoFrame.PixelDataLength];
                    depthVideoFrame.CopyPixelDataTo(pixelData);
                    depthVideo = new Texture2D(graphics.GraphicsDevice, depthVideoFrame.Width, depthVideoFrame.Height);
                    depthVideo.SetData(ConvertDepthFrame(pixelData, kinect.DepthStream));
                }
            }
        }
コード例 #29
0
        /// <summary>
        /// AllFramesReady handler: converts the color frame to a bitmap and
        /// overlays the tracked skeletons onto the drawing group, then shows
        /// the bitmap in img_sensor.
        /// </summary>
        public void FramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // BUG FIX: both frames are IDisposable; the original leaked them,
            // and additionally leaked the color frame on the early return taken
            // when the skeleton frame was null.
            using (ColorImageFrame Vframe = e.OpenColorImageFrame())
            using (SkeletonFrame sframe = e.OpenSkeletonFrame())
            {
                if (Vframe == null || sframe == null)
                {
                    return;
                }

                Skeleton[] skeletons = new Skeleton[sframe.SkeletonArrayLength];
                sframe.CopySkeletonDataTo(skeletons);
                BitmapSource bmp = ImagetoBitMap(Vframe);

                using (DrawingContext dc = this.drawingGroup.Open())
                {
                    if (skeletons != null)
                    {
                        foreach (Skeleton item in skeletons)
                        {
                            // NOTE(review): r.Next(1, 4) yields 1..3 but only 1 and 2
                            // are handled, so a roll of 3 keeps the previous pen —
                            // TODO confirm this fall-through is intentional.
                            Random r = new Random();
                            switch (r.Next(1, 4))
                            {
                            case 1:
                                trackedBonPen = new Pen(Brushes.White, 5);
                                break;

                            case 2:
                                trackedBonPen = new Pen(Brushes.Yellow, 5);
                                break;
                            }

                            RenderClippedEdges(item, dc);

                            if (item.TrackingState == SkeletonTrackingState.Tracked)
                            {
                                this.DrawBonesAndJoints(item, dc);
                            }
                            else if (item.TrackingState == SkeletonTrackingState.PositionOnly)
                            {
                                // Position-only skeletons are drawn as a single
                                // ellipse at the body center.
                                dc.DrawEllipse(
                                    this.centerPointBrush,
                                    null,
                                    this.SkeletonPointToScreen(item.Position),
                                    BodyCenterThickness,
                                    BodyCenterThickness);
                            }
                        }
                    }
                }

                img_sensor.Source = bmp;
            }
        }
コード例 #30
0
        /// <summary>
        /// AllFramesReady handler for the tracking turret: locates the closest
        /// depth pixel within [minDist, maxDist], marks it on the RGB preview,
        /// and every fourth frame sends aim-adjustment commands over the serial
        /// port to re-center the target.
        /// </summary>
        void FramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // BUG FIX: frames are IDisposable and were previously leaked.
            using (DepthImageFrame imageFrame = e.OpenDepthImageFrame())
            using (ColorImageFrame rgbFrame = e.OpenColorImageFrame())
            {
                if (imageFrame == null || rgbFrame == null)
                {
                    return;
                }

                // BUG FIX: use the frame's real width instead of hard-coded 640.
                int width = imageFrame.Width;
                int   targetPixelDist = int.MaxValue;
                int[] targetPixelPos  = { width / 2, imageFrame.Height / 2 };
                object gate = new object();

                short[] pixelData = new short[imageFrame.PixelDataLength];
                imageFrame.CopyPixelDataTo(pixelData);

                // BUG FIX: the original lambdas mutated the shared minimum with no
                // synchronization (a data race). Each row now finds its own best
                // candidate and merges it under a lock, making the result
                // deterministic.
                Parallel.For(0, imageFrame.Height, new ParallelOptions {
                    MaxDegreeOfParallelism = 16
                }, y =>
                {
                    int bestDist = int.MaxValue;
                    int bestX = -1;
                    for (int x = 0; x < width; x++)
                    {
                        // Low 3 bits of a depth sample are the player index;
                        // shift them off to get millimeters.
                        int depth = (ushort)(pixelData[x + y * width] >> 3);
                        if (depth < maxDist && depth > minDist && depth < bestDist)
                        {
                            bestDist = depth;
                            bestX = x;
                        }
                    }
                    if (bestX >= 0)
                    {
                        lock (gate)
                        {
                            if (bestDist < targetPixelDist)
                            {
                                targetPixelDist   = bestDist;
                                targetPixelPos[0] = bestX;
                                targetPixelPos[1] = y;
                            }
                        }
                    }
                });

                pictureBox1.Image = ImageToBitmap(rgbFrame);
                using (Graphics drawStats = Graphics.FromImage(pictureBox1.Image))
                {
                    drawStats.FillEllipse(Brushes.Red, targetPixelPos[0], targetPixelPos[1], 10, 10);
                }

                // Only steer on every fourth frame to avoid flooding the serial port.
                frameCount++;
                if (frameCount >= 4)
                {
                    // Drain any stale bytes the controller sent back.
                    while (port.BytesToRead > 0)
                    {
                        port.ReadByte();
                    }
                    frameCount = 0;
                    if (targetPixelPos[0] != width / 2 && targetPixelPos[1] != imageFrame.Height / 2)
                    {
                        // Detected something.
                        numberOfTimesDetectedSomething++;
                    }
                    else
                    {
                        // No target found.
                        numberOfTimesDetectedNothing++;
                    }
                    if (numberOfTimesDetectedSomething >= 3)
                    {
                        if (numberOfTimesDetectedNothing > 3)
                        {
                            numberOfTimesDetectedNothing = 0;
                        }

                        // Vertical correction (labelled "yaw" in the UI).
                        if (targetPixelPos[1] < (imageFrame.Height / 2) - threshold)
                        {
                            yawLabel.Text = "Up";
                            port.WriteLine("U");
                            port.ReadLine();
                        }
                        else if (targetPixelPos[1] > (imageFrame.Height / 2) + threshold)
                        {
                            yawLabel.Text = "Down";
                            port.WriteLine("D");
                            port.ReadLine();
                        }
                        else
                        {
                            yawLabel.Text = "Target yaw";
                        }

                        // Horizontal correction (labelled "pitch" in the UI).
                        // NOTE(review): "Left" sends "R" and "Right" sends "L" —
                        // presumably the camera is mirrored; confirm with hardware.
                        if (targetPixelPos[0] < (width / 2) - threshold)
                        {
                            pitchLabel.Text = "Left";
                            port.WriteLine("R");
                            port.ReadLine();
                        }
                        else if (targetPixelPos[0] > (width / 2) + threshold)
                        {
                            pitchLabel.Text = "Right";
                            port.WriteLine("L");
                            port.ReadLine();
                        }
                        else
                        {
                            pitchLabel.Text = "Target pitch";
                        }
                    }
                }
            }
        }