/// <summary>
        /// Handles the AllFramesReady event of the kinectSensor control.
        /// </summary>
        /// <param name="sender">The source of the event.</param>
        /// <param name="e">The <see cref="Microsoft.Kinect.AllFramesReadyEventArgs"/> instance containing the event data.</param>
        void kinectSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Retrieve each frame and copy its data
            using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }
                colorImageFrame.CopyPixelDataTo(colorPixelData);
            }

            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            {
                if (depthImageFrame == null)
                {
                    return;
                }
                depthImageFrame.CopyPixelDataTo(depthPixelData);
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }
                skeletonFrame.CopySkeletonDataTo(skeletonData);
            }

            // Retrieve the first tracked skeleton if any. Otherwise, do nothing.
            var skeleton = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);

            if (skeleton == null)
            {
                return;
            }

            // Have the faceTracker process the data.
            FaceTrackFrame faceFrame = faceTracker.Track(kinectSensor.ColorStream.Format, colorPixelData,
                                                         kinectSensor.DepthStream.Format, depthPixelData,
                                                         skeleton);

            // If a face is tracked, then we can use it.
            if (faceFrame.TrackSuccessful)
            {
                var triangles = faceFrame.GetTriangles();
                // Retrieve only the Animation Unit coefficients.
                var AUCoeff = faceFrame.GetAnimationUnitCoefficients();

                var jawLowerer = AUCoeff[AnimationUnit.JawLower];
                jawLowerer = jawLowerer < 0 ? 0 : jawLowerer;
                MouthScaleTransform.ScaleY = jawLowerer * 5 + 0.1;
                MouthScaleTransform.ScaleX = (AUCoeff[AnimationUnit.LipStretcher] + 1);

                LeftBrow.Y            = RightBrow.Y = (AUCoeff[AnimationUnit.BrowLower]) * 40;
                RightBrowRotate.Angle = (AUCoeff[AnimationUnit.BrowRaiser] * 20);
                LeftBrowRotate.Angle  = -RightBrowRotate.Angle;
                CanvasRotate.Angle    = -faceFrame.Rotation.Z;
                // CanvasTranslate.X = faceFrame.Translation.X;
                // CanvasTranslate.Y = faceFrame.Translation.Y;

                if (logToFile)
                {
                    writeToFile(filename_txt.Text, faceFrame);
                }

                if (writeToOSC)
                {
                    sendOsc(osc_channel_txt.Text, faceFrame, oscWriter);
                }
            }
        }
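
        // Note: the handler above relies on fields and stream setup that are not shown in this
        // snippet. The following is an illustrative sketch (not part of the original sample);
        // buffer sizes are taken from the enabled streams, as in the later examples on this page.
        private byte[] colorPixelData;
        private short[] depthPixelData;
        private Skeleton[] skeletonData;
        private FaceTracker faceTracker;   // Microsoft.Kinect.Toolkit.FaceTracking

        private void InitializeKinect(KinectSensor kinectSensor)
        {
            kinectSensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
            kinectSensor.DepthStream.Enable(DepthImageFormat.Resolution320x240Fps30);
            kinectSensor.SkeletonStream.Enable();

            // Reusable buffers sized from the enabled streams
            colorPixelData = new byte[kinectSensor.ColorStream.FramePixelDataLength];
            depthPixelData = new short[kinectSensor.DepthStream.FramePixelDataLength];
            skeletonData   = new Skeleton[kinectSensor.SkeletonStream.FrameSkeletonArrayLength];

            faceTracker = new FaceTracker(kinectSensor);

            kinectSensor.AllFramesReady += kinectSensor_AllFramesReady;
            kinectSensor.Start();
        }
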
Example #2
        void sensor_AllFramesReady(object sender, Microsoft.Kinect.AllFramesReadyEventArgs e)
        {
            if (this.command == "Stop")
            {
                Bot.stop();
            }
            if (this.command == "Forward")
            {
                Bot.traverse();
            }
            if (this.command == "Right")
            {
                Bot.turnRight();
            }
            if (this.command == "Left")
            {
                Bot.turnLeft();
            }
            xf++;
            if (xf % 5 == 0)
            {
                xf = 0;
                if (this.command != null)
                {
                    using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                    {
                        using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                        {
                            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
                            {
                                humanPosition = frameToHuman(skeletonFrame);

                                if (colorFrame != null)
                                {
                                    // Copy the pixel data from the image to a temporary array
                                    colorFrame.CopyPixelDataTo(this.colorPixels);

                                    // Write the pixel data into our bitmap
                                    this.colorBitmap.WritePixels(
                                        new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                                        this.colorPixels,
                                        this.colorBitmap.PixelWidth * sizeof(int),
                                        0);

                                    // Error here due to OpenCV_core290.dll
                                    //int[] objPos = new int[2];
                                    //objPos = tmp.matchColor(ImageProc.colorFrameToImage(colorFrame));
                                    //if (objPos != null)
                                    //{
                                    //    short blobDepth = getDepthAtPoint(objPos, depthFrame);
                                    //    this.lblObject.Content = objPos[0] + ", " + objPos[1] + ", " + blobDepth;
                                    //}
                                    //else
                                    //{
                                    //    this.lblObject.Content = "Null";
                                    //}
                                    System.Drawing.Bitmap bmp    = ImageProc.colorFrameToAforge(colorFrame);
                                    HSLFiltering          filter = new HSLFiltering();
                                    // set color ranges to keep
                                    if (objec[0] == -1)
                                    {
                                        if (command == "Fetching Bottle")
                                        {
                                            filter.Hue        = bottleH;
                                            filter.Saturation = bottleS;
                                            filter.Luminance  = bottleL;
                                        }
                                        else if (command == "Fetching Box")
                                        {
                                            filter.Hue        = boxH;
                                            filter.Saturation = boxS;
                                            filter.Luminance  = boxL;
                                        }

                                        //// apply the filter
                                        filter.ApplyInPlace(bmp);

                                        BlobCounter        blobCounter = new BlobCounter(bmp);
                                        int                i           = blobCounter.ObjectsCount;
                                        ExtractBiggestBlob fil         = new ExtractBiggestBlob();

                                        int[] pp = new int[2];
                                        pp[0] = 0;
                                        pp[1] = 0;
                                        int h = 0;
                                        if (i > 0)
                                        {
                                            fil.Apply(bmp);
                                            pp[0] = fil.BlobPosition.X;
                                            pp[1] = fil.BlobPosition.Y;

                                            h = fil.Apply(bmp).Height;
                                        }

                                        short blobDepth = getDepthAtPoint(pp, depthFrame);
                                        this.lblObject.Content = pp[0] + ", " + pp[1] + ", " + blobDepth;
                                        this.objec[0]          = pp[0];
                                        this.objec[1]          = pp[1];
                                        this.objec[2]          = blobDepth;
                                    }
                                    else
                                    {
                                        filter.Hue        = botH;
                                        filter.Saturation = botS;
                                        filter.Luminance  = botL;
                                        filter.ApplyInPlace(bmp);

                                        BlobCounter        blobCounter = new BlobCounter(bmp);
                                        int                i           = blobCounter.ObjectsCount;
                                        ExtractBiggestBlob fil         = new ExtractBiggestBlob();

                                        int[] pp = new int[2];
                                        pp[0] = 0;
                                        pp[1] = 0;
                                        int h = 0;
                                        if (i > 0)
                                        {
                                            fil.Apply(bmp);
                                            pp[0] = fil.BlobPosition.X;
                                            pp[1] = fil.BlobPosition.Y;

                                            h = fil.Apply(bmp).Height;
                                        }

                                        short blobDepth = getDepthAtPoint(pp, depthFrame);
                                        this.lblBot.Content = pp[0] + ", " + pp[1] + ", " + blobDepth;
                                        this.bot[0]         = pp[0];
                                        this.bot[1]         = pp[1];
                                        this.bot[2]         = blobDepth;
                                    }

                                    //Assign Manual Position to bot and object
                                }
                                if (humanPosition != null)
                                {
                                    this.lblHuman.Content = humanPosition[0] + ", " + humanPosition[1] + ", " + humanPosition[2];
                                }
                                else
                                {
                                    this.lblHuman.Content = "No Human detected";
                                }

                                if (this.path == 0)
                                {
                                    if (humanPosition != null)
                                    {
                                        if (Bot.moveDoraemon(this.bot[0], this.humanPosition[0], this.bot[2], this.humanPosition[2]) == 0)
                                        {
                                            this.path = 1;
                                        }
                                    }
                                }
                                else
                                {
                                    if (Bot.moveDoraemon(this.bot[0], this.objec[0], this.bot[2], this.objec[2]) == 0)
                                    {
                                        Bot.stop();
                                    }
                                }
                            }
                        }
                    }
                    this.lbl.Content = command;
                }
            }
        }
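
        // Note: getDepthAtPoint is not shown in this sample. A minimal sketch of what it might
        // look like, assuming pp holds an (x, y) position in depth-image coordinates; the >> 3
        // shift to drop the player index matches the one used elsewhere in these examples.
        private short getDepthAtPoint(int[] pp, DepthImageFrame depthFrame)
        {
            if (depthFrame == null)
            {
                return -1;
            }

            short[] depthData = new short[depthFrame.PixelDataLength];
            depthFrame.CopyPixelDataTo(depthData);

            int index = pp[0] + (pp[1] * depthFrame.Width);
            if (index < 0 || index >= depthData.Length)
            {
                return -1;
            }

            // The low 3 bits hold the player index; shift them out to get millimetres.
            return (short)(depthData[index] >> 3);
        }
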
        private void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            Skeleton[] skeletons = new Skeleton[0];

            bool depthReceived = false;
            bool colorReceived = false;

            using (DepthImageFrame framesDistancia = e.OpenDepthImageFrame())
            {
                if (framesDistancia == null)
                {
                    return;
                }

                framesDistancia.CopyDepthImagePixelDataTo(this.depthPixels);

                depthReceived = true;


                if (datosDistancia == null)
                {
                    datosDistancia = new short[framesDistancia.PixelDataLength];
                }

                if (colorImagenDistancia == null)
                {
                    colorImagenDistancia = new byte[framesDistancia.PixelDataLength * 4];
                }

                framesDistancia.CopyPixelDataTo(datosDistancia);
            }

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    colorFrame.CopyPixelDataTo(this.colorPixels);

                    colorReceived = true;

                    System.Drawing.Bitmap bmp             = EmguCVHelper.ImageToBitmap(colorFrame);
                    Image <Hsv, Byte>     currentFrameHSV = new Image <Hsv, byte>(bmp);

                    Image <Gray, Byte> grayFrame = currentFrameHSV.Convert <Gray, Byte>();

                    Image <Gray, Byte> imageHSVDest = currentFrameHSV.InRange(lowerLimit, upperLimit);
                    imageHSVDest.Erode(100); // NOTE: Erode returns a new image, so as written this call has no effect on imageHSVDest
                    VectorOfVectorOfPoint vectorOfPoint = EmguCVHelper.FindContours(imageHSVDest);

                    for (int i = 0; i < vectorOfPoint.Size; i++)
                    {
                        var contour = vectorOfPoint[i];
                        var area    = CvInvoke.ContourArea(contour);
                        if (area > 100)
                        {
                            System.Drawing.Rectangle rec = CvInvoke.BoundingRectangle(contour);
                            Point p1 = new Point(rec.X, rec.Y);
                            Point p2 = new Point(rec.X + rec.Width, rec.Y + rec.Height);
                            ObjetoX = (p1.X + p2.X) / 2;
                            ObjetoY = (p1.Y + p2.Y) / 2;

                            if (true == depthReceived)
                            {
                                this.sensor.CoordinateMapper.MapDepthFrameToColorFrame(
                                    DepthFormat,
                                    this.depthPixels,
                                    ColorFormat,
                                    this.colorCoordinates);


                                int             depthIndex = (int)ObjetoX + ((int)ObjetoY * this.depthWidth);
                                DepthImagePixel depthPixel = this.depthPixels[depthIndex];


                                ObjetoZ = datosDistancia[depthIndex] >> 3;

                                int X = (int)ObjetoX / this.colorToDepthDivisor;
                                int Y = (int)ObjetoY / this.colorToDepthDivisor;
                            }


                            if (ObjetoZ > 0)
                            {
                                skelObjeto = DistanceHelper.ObtenerSkelPoint((int)ObjetoX, (int)ObjetoY,
                                                                             ObjetoZ, this.sensor);

                                flagObjeto = true;
                            }
                        }
                    }

                    colorFrame.CopyPixelDataTo(this.colorPixels);
                    // Write the pixel data into our bitmap
                    this.colorBitmap.WritePixels(
                        new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                        this.colorPixels,
                        this.colorBitmap.PixelWidth * sizeof(int),
                        0);
                }
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    skeletonFrame.CopySkeletonDataTo(skeletons);
                }
            }

            using (DrawingContext dc = this.drawingGroup.Open())
            {
                // Draw a transparent background to set the render size
                //dc.DrawRectangle(Brushes.Black, null, new Rect(0.0, 0.0, RenderWidth, RenderHeight));
                dc.DrawImage(this.colorBitmap, new Rect(0.0, 0.0, RenderWidth, RenderHeight));
                if (skeletons.Length != 0)
                {
                    foreach (Skeleton skel in skeletons)
                    {
                        if (skel.TrackingState == SkeletonTrackingState.Tracked)
                        {
                            this.DrawBonesAndJoints(skel, dc);
                        }

                        //Take measurements of the right hand, shoulder and elbow:
                        ManoDerecha = skel.Joints[JointType.HandRight];
                        //Joint munecaDer = skel.Joints[JointType.WristRight];
                        CodoDerecho   = skel.Joints[JointType.ElbowRight];
                        HombroDerecho = skel.Joints[JointType.ShoulderRight];

                        //Draw a black dot over the detected object
                        Point objeto = new Point(this.ObjetoX, this.ObjetoY);
                        dc.DrawEllipse(Brushes.Black, new Pen(Brushes.Black, 5), objeto, 5, 5);

                        if ((HombroDerecho.TrackingState == JointTrackingState.Tracked) &&
                            (ManoDerecha.TrackingState == JointTrackingState.Tracked) &&
                            (CodoDerecho.TrackingState == JointTrackingState.Tracked))
                        {
                            if (flagObjeto && !flagSkeleton)
                            {
                                CalcularAngulosFinales();
                            }

                            //Console.WriteLine($"Mano X Y Z {handRight.Position.X} {handRight.Position.Y} {handRight.Position.Z}");
                            //Console.WriteLine($"Objeto X Y Z {skelObjeto.X} {skelObjeto.Y} {skelObjeto.Z}");

                            if (DistanceHelper.ObtenerDistancia(ManoDerecha, skelObjeto) < 0.1)
                            {
                                //the hand has reached the object, so close the window
                                //and send the data.
                                resultado = true;
                                this.Close();
                            }
                        }
                    }
                }

                // prevent drawing outside of our render area
                this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, RenderWidth, RenderHeight));
            }
        }
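
        // Note: a sketch (not from the original sample) of how the mapping-related fields used
        // above are typically set up; the 640x480 formats mirror the values used in a later
        // example on this page.
        private const DepthImageFormat DepthFormat = DepthImageFormat.Resolution640x480Fps30;
        private const ColorImageFormat ColorFormat = ColorImageFormat.RgbResolution640x480Fps30;

        private DepthImagePixel[] depthPixels;
        private ColorImagePoint[] colorCoordinates;
        private int depthWidth;
        private int depthHeight;
        private int colorToDepthDivisor;

        private void AllocateMappingBuffers(KinectSensor sensor)
        {
            sensor.DepthStream.Enable(DepthFormat);
            sensor.ColorStream.Enable(ColorFormat);

            depthWidth  = sensor.DepthStream.FrameWidth;
            depthHeight = sensor.DepthStream.FrameHeight;

            depthPixels      = new DepthImagePixel[sensor.DepthStream.FramePixelDataLength];
            colorCoordinates = new ColorImagePoint[sensor.DepthStream.FramePixelDataLength];

            // Ratio between the color and depth resolutions (1 for 640x480 color, 2 for 1280x960).
            colorToDepthDivisor = sensor.ColorStream.FrameWidth / depthWidth;
        }
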
Example #4
        private void processFrame(ColorImageFrame colorFrame)
        {
            //Console.WriteLine("Processing frame");
            //using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    bool colorFormat = this.rgbImageFormat != colorFrame.Format;

                    if (colorFormat)
                    {
                        width  = colorFrame.Width;
                        height = colorFrame.Height;

                        this.colorpixelData = new byte[colorFrame.PixelDataLength];
                        this.colorFrameRGB  = new byte[colorFrame.Width * colorFrame.Height * Bgr32BytesPerPixel];

                        this.processedBitmap = new WriteableBitmap(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null);

                        //this.procImage.Source = this.processedBitmap; // do now or later?

                        //Console.WriteLine("Frame written");
                    }



                    colorFrame.CopyPixelDataTo(this.colorpixelData);

                    // Output raw image
                    this.outputColorBitmap = new WriteableBitmap(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                    this.outputColorBitmap.WritePixels(
                        (new Int32Rect(0, 0, colorFrame.Width, colorFrame.Height)),
                        colorpixelData,
                        colorFrame.Width * Bgr32BytesPerPixel,
                        0);
                    this.raw_image.Source = this.outputColorBitmap;

                    // PROCESS THE DATA //
                    colorpixelData = convertToHSL(colorpixelData);
                    frameProcessor(colorpixelData);
                    //processedcolorpixelData = colorpixelData;

                    lbl_Pixels.Content = "Pixels: " + pixels;



                    // Output processed image
                    this.processedBitmap.WritePixels(
                        new Int32Rect(0, 0, colorFrame.Width, colorFrame.Height),
                        processedcolorpixelData,
                        colorFrame.Width * Bgr32BytesPerPixel,
                        0);

                    this.rgbImageFormat = colorFrame.Format;

                    this.procImage.Source = this.processedBitmap;
                    Console.WriteLine("Frame written");

                    colorFrame.Dispose();
                }
            }
        }
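
        // Note (sketch, not from the original sample): processFrame expects to be handed a
        // ColorImageFrame, handles null itself, and disposes the frame when it is done, so one
        // way to wire it up is via a ColorFrameReady handler like this (handler name assumed):
        private void sensor_ColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
        {
            ColorImageFrame colorFrame = e.OpenColorImageFrame(); // may be null if the frame was missed
            if (colorFrame != null)
            {
                processFrame(colorFrame); // processFrame calls colorFrame.Dispose() when finished
            }
        }
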
        /// <summary>
        /// Event handler for the Kinect sensor's AllFramesReady event
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // in the middle of shutting down, so nothing to do
            if (null == this.sensor)
            {
                return;
            }

            bool depthReceived = false;
            bool colorReceived = false;

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (null != depthFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);

                    depthReceived = true;
                }
            }

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (null != colorFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    colorFrame.CopyPixelDataTo(this.colorPixels);

                    colorReceived = true;
                }
            }

            // do our processing outside of the using block
            // so that we return resources to the kinect as soon as possible
            if (true == depthReceived)
            {
                this.sensor.CoordinateMapper.MapDepthFrameToColorFrame(
                    DepthFormat,
                    this.depthPixels,
                    ColorFormat,
                    this.colorCoordinates);

                Array.Clear(this.greenScreenPixelData, 0, this.greenScreenPixelData.Length);

                // loop over each row and column of the depth
                for (int y = 0; y < this.depthHeight; ++y)
                {
                    for (int x = 0; x < this.depthWidth; ++x)
                    {
                        // calculate index into depth array
                        int depthIndex = x + (y * this.depthWidth);

                        DepthImagePixel depthPixel = this.depthPixels[depthIndex];

                        int player = depthPixel.PlayerIndex;

                        // if we're tracking a player for the current pixel, do green screen
                        if (player > 0)
                        {
                            // retrieve the depth to color mapping for the current depth pixel
                            ColorImagePoint colorImagePoint = this.colorCoordinates[depthIndex];

                            // scale color coordinates to depth resolution
                            int colorInDepthX = colorImagePoint.X / this.colorToDepthDivisor;
                            int colorInDepthY = colorImagePoint.Y / this.colorToDepthDivisor;

                            // make sure the depth pixel maps to a valid point in color space
                            // check y > 0 and y < depthHeight to make sure we don't write outside of the array
                            // check x > 0 instead of >= 0 since to fill gaps we set opaque current pixel plus the one to the left
                            // because of how the sensor works it is more correct to do it this way than to set to the right
                            if (colorInDepthX > 0 && colorInDepthX < this.depthWidth && colorInDepthY >= 0 && colorInDepthY < this.depthHeight)
                            {
                                // calculate index into the green screen pixel array
                                int greenScreenIndex = colorInDepthX + (colorInDepthY * this.depthWidth);

                                // set opaque
                                this.greenScreenPixelData[greenScreenIndex] = opaquePixelValue;

                                // compensate for depth/color not corresponding exactly by setting the pixel
                                // to the left to opaque as well
                                this.greenScreenPixelData[greenScreenIndex - 1] = opaquePixelValue;
                            }
                        }
                    }
                }
            }

            // do our processing outside of the using block
            // so that we return resources to the kinect as soon as possible
            if (true == colorReceived)
            {
                // Write the pixel data into our bitmap
                this.colorBitmap.WritePixels(
                    new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                    this.colorPixels,
                    this.colorBitmap.PixelWidth * sizeof(int),
                    0);

                if (this.playerOpacityMaskImage == null)
                {
                    this.playerOpacityMaskImage = new WriteableBitmap(
                        this.depthWidth,
                        this.depthHeight,
                        96,
                        96,
                        PixelFormats.Bgra32,
                        null);

                    MaskedColor.OpacityMask = new ImageBrush {
                        ImageSource = this.playerOpacityMaskImage
                    };
                }

                this.playerOpacityMaskImage.WritePixels(
                    new Int32Rect(0, 0, this.depthWidth, this.depthHeight),
                    this.greenScreenPixelData,
                    this.depthWidth * ((this.playerOpacityMaskImage.Format.BitsPerPixel + 7) / 8),
                    0);
            }
        }
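
        // Note: a sketch (following the Kinect SDK GreenScreen sample; exact values are
        // assumptions) of the mask-related fields the handler above relies on.
        private const int opaquePixelValue = -1;        // all bits set: a fully opaque Bgra32 pixel
        private int[] greenScreenPixelData;             // one entry per depth pixel
        private DepthImagePixel[] depthPixels;          // raw depth samples
        private byte[] colorPixels;                     // raw Bgr32 color bytes
        private ColorImagePoint[] colorCoordinates;     // depth-to-color mapping
        private WriteableBitmap colorBitmap;            // color image shown behind the mask
        private WriteableBitmap playerOpacityMaskImage; // rewritten every frame and used as MaskedColor's OpacityMask
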
Example #6
        /// <summary>
        /// Frame-update event for the RGB camera and skeleton streams
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            try {
                KinectSensor kinect = sender as KinectSensor;

                using (ColorImageFrame colorFrame = e.OpenColorImageFrame()) {
                    if (colorFrame != null)
                    {
                        byte[] colorPixel = new byte[colorFrame.PixelDataLength];
                        colorFrame.CopyPixelDataTo(colorPixel);
                        imageRgb.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96,
                                                              PixelFormats.Bgr32, null, colorPixel, colorFrame.Width * colorFrame.BytesPerPixel);
                    }
                }

                using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame()) {
                    if (skeletonFrame != null)
                    {
                        // Draw the joints of the tracked skeleton
                        Skeleton skeleton = skeletonFrame.GetFirstTrackedSkeleton();

                        if ((skeleton != null) && (skeleton.TrackingState == SkeletonTrackingState.Tracked))
                        {
                            Joint hand = skeleton.Joints[JointType.HandRight];
                            if (hand.TrackingState == JointTrackingState.Tracked)
                            {
                                ImageSource   source        = imageRgb.Source;
                                DrawingVisual drawingVisual = new DrawingVisual();

                                using (DrawingContext drawingContext = drawingVisual.RenderOpen()) {
                                    // Render the current RGB image into a drawable bitmap
                                    drawingContext.DrawImage(imageRgb.Source,
                                                             new Rect(0, 0, source.Width, source.Height));

                                    // Draw a circle at the hand position
                                    DrawSkeletonPoint(drawingContext, hand);
                                }

                                // Create a bitmap we can render into
                                // http://stackoverflow.com/questions/831860/generate-bitmapsource-from-uielement
                                RenderTargetBitmap bitmap = new RenderTargetBitmap((int)source.Width,
                                                                                   (int)source.Height, 96, 96, PixelFormats.Default);
                                bitmap.Render(drawingVisual);

                                imageRgb.Source = bitmap;

                                // Map the hand position in the frame to a position on the display
                                ColorImagePoint point = kinect.MapSkeletonPointToColor(hand.Position,           // skeleton coordinates -> RGB image coordinates
                                                                                       kinect.ColorStream.Format);
                                System.Windows.Forms.Screen screen = System.Windows.Forms.Screen.AllScreens[0]; // get the main display's bounds
                                point.X = (point.X * screen.Bounds.Width) / kinect.ColorStream.FrameWidth;
                                point.Y = (point.Y * screen.Bounds.Height) / kinect.ColorStream.FrameHeight;

                                // Move the mouse cursor
                                SendInput.MouseMove(point.X, point.Y, screen);

                                // Click action
                                if (IsClicked(skeletonFrame, point))
                                {
                                    SendInput.LeftClick();
                                }
                            }
                        }
                    }
                }
            }
            catch (Exception ex) {
                MessageBox.Show(ex.Message);
            }
        }
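
        // Note: GetFirstTrackedSkeleton is not part of the Kinect SDK. A minimal sketch of the
        // extension method this sample appears to rely on (requires System.Linq and must live in
        // a top-level static class).
        public static class SkeletonFrameExtensions
        {
            public static Skeleton GetFirstTrackedSkeleton(this SkeletonFrame frame)
            {
                if (frame == null)
                {
                    return null;
                }

                Skeleton[] skeletons = new Skeleton[frame.SkeletonArrayLength];
                frame.CopySkeletonDataTo(skeletons);

                // Same pattern as the FirstOrDefault lookup used in the first example above.
                return skeletons.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);
            }
        }
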
Example #7
        private byte[][] SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            bool depthReceived = false;
            bool colorReceived = false;

            DepthImagePixel[] depthPixels;
            byte[]            colorPixels;
            ColorImagePoint[] colorCoordinates;
            int colorToDepthDivisor;

            byte[] greenScreenPixelData;

            // Allocate buffers for the depth, color, green-screen and coordinate-mapping data
            depthPixels          = new DepthImagePixel[this.kinectSensor.DepthStream.FramePixelDataLength];
            colorPixels          = new byte[this.kinectSensor.ColorStream.FramePixelDataLength];
            greenScreenPixelData = new byte[this.kinectSensor.DepthStream.FramePixelDataLength];
            colorCoordinates     = new ColorImagePoint[this.kinectSensor.DepthStream.FramePixelDataLength];

            int colorWidth  = this.kinectSensor.ColorStream.FrameWidth;
            int colorHeight = this.kinectSensor.ColorStream.FrameHeight;

            colorToDepthDivisor = colorWidth / 640;

            byte[][] results = new byte[2][];

            DepthImageFormat DepthFormat = DepthImageFormat.Resolution640x480Fps30;
            ColorImageFormat ColorFormat = ColorImageFormat.RgbResolution640x480Fps30;

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (null != depthFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    depthFrame.CopyDepthImagePixelDataTo(depthPixels);
                    depthReceived = true;
                }
            }

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (null != colorFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    this.outputColorBitmap = new WriteableBitmap(640, 480, 96, 96, PixelFormats.Bgr32, null);
                    colorFrame.CopyPixelDataTo(colorPixels);
                    colorReceived = true;
                }
            }

            if (true == depthReceived)
            {
                this.kinectSensor.CoordinateMapper.MapDepthFrameToColorFrame(
                    DepthFormat,
                    depthPixels,
                    ColorFormat,
                    colorCoordinates);

                Array.Clear(greenScreenPixelData, 0, greenScreenPixelData.Length);

                // loop over each row and column of the depth
                for (int y = 0; y < 480; ++y)
                {
                    for (int x = 0; x < 640; ++x)
                    {
                        // calculate index into depth array
                        int depthIndex = x + (y * 640);

                        DepthImagePixel depthPixel = depthPixels[depthIndex];

                        int player = depthPixel.PlayerIndex;

                        // if we're tracking a player for the current pixel, do green screen
                        if (player > 0)
                        {
                            // retrieve the depth to color mapping for the current depth pixel
                            ColorImagePoint colorImagePoint = colorCoordinates[depthIndex];

                            // scale color coordinates to depth resolution
                            int colorInDepthX = colorImagePoint.X / colorToDepthDivisor;
                            int colorInDepthY = colorImagePoint.Y / colorToDepthDivisor;

                            // make sure the depth pixel maps to a valid point in color space
                            if (colorInDepthX > 0 && colorInDepthX < 640 && colorInDepthY >= 0 && colorInDepthY < 480)
                            {
                                // calculate index into the green screen pixel array
                                int greenScreenIndex = colorInDepthX + (colorInDepthY * 640);

                                // set opaque
                                greenScreenPixelData[greenScreenIndex] = 33;

                                // compensate for depth/color not corresponding exactly by setting the pixel
                                // to the left to opaque as well
                                greenScreenPixelData[greenScreenIndex - 1] = 33;
                            }
                        }
                    }
                }
            }

            if (true == colorReceived)
            {
                // Write the pixel data into our bitmap

                /*
                 * this.outputColorBitmap.WritePixels(
                 * new Int32Rect(0, 0, this.outputColorBitmap.PixelWidth, this.outputColorBitmap.PixelHeight),
                 * colorPixels,
                 * this.outputColorBitmap.PixelWidth * sizeof(int),
                 * 0);
                 *
                 * if (playerOpacityMaskImage == null)
                 * {
                 *  playerOpacityMaskImage = new WriteableBitmap(
                 *      640,
                 *      480,
                 *      96,
                 *      96,
                 *      PixelFormats.Bgra32,
                 *      null);
                 *
                 *  results[0] = playerOpacityMaskImage;
                 * }
                 *
                 * playerOpacityMaskImage.WritePixels(
                 *  new Int32Rect(0, 0, 640, 480),
                 *  greenScreenPixelData,
                 *  640 * ((playerOpacityMaskImage.Format.BitsPerPixel + 7) / 8),
                 *  0);
                 */
                results[0] = greenScreenPixelData; // playerOpacityMaskImage
                results[1] = colorPixels;
                return(results);
            }

            return(results);
        }
        private void ColorImageReady(object sender, ColorImageFrameReadyEventArgs e)
        {
            using (ColorImageFrame imageFrame = e.OpenColorImageFrame())
            {
                if (imageFrame != null)
                {
                    // We need to detect if the format has changed.
                    bool haveNewFormat = this.lastImageFormat != imageFrame.Format;
                    bool convertToRgb  = false;
                    int  bytesPerPixel = imageFrame.BytesPerPixel;

                    if (imageFrame.Format == ColorImageFormat.RawBayerResolution640x480Fps30 ||
                        imageFrame.Format == ColorImageFormat.RawBayerResolution1280x960Fps12)
                    {
                        convertToRgb  = true;
                        bytesPerPixel = 4;
                    }

                    if (haveNewFormat)
                    {
                        if (convertToRgb)
                        {
                            this.rawPixelData = new byte[imageFrame.PixelDataLength];
                            this.pixelData    = new byte[bytesPerPixel * imageFrame.Width * imageFrame.Height];
                        }
                        else
                        {
                            this.pixelData = new byte[imageFrame.PixelDataLength];
                        }
                    }

                    if (convertToRgb)
                    {
                        imageFrame.CopyPixelDataTo(this.rawPixelData);
                        ConvertBayerToRgb32(imageFrame.Width, imageFrame.Height);
                    }
                    else
                    {
                        imageFrame.CopyPixelDataTo(this.pixelData);
                    }

                    // A WriteableBitmap is a WPF construct that enables resetting the Bits of the image.
                    // This is more efficient than creating a new Bitmap every frame.
                    if (haveNewFormat)
                    {
                        PixelFormat format = PixelFormats.Bgr32;
                        if (imageFrame.Format == ColorImageFormat.InfraredResolution640x480Fps30)
                        {
                            format = PixelFormats.Gray16;
                        }

                        kinectColorImage.Visibility = Visibility.Visible;
                        this.outputImage            = new WriteableBitmap(
                            imageFrame.Width,
                            imageFrame.Height,
                            96,  // DpiX
                            96,  // DpiY
                            format,
                            null);

                        this.kinectColorImage.Source = this.outputImage;
                    }

                    this.outputImage.WritePixels(
                        new Int32Rect(0, 0, imageFrame.Width, imageFrame.Height),
                        this.pixelData,
                        imageFrame.Width * bytesPerPixel,
                        0);

                    this.lastImageFormat = imageFrame.Format;

                    UpdateFrameRate();
                }
            }
        }
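
        // Note: ConvertBayerToRgb32 is not shown above. The sketch below is one simple
        // nearest-neighbor demosaic, assuming a GR/BG Bayer layout; the Kinect Toolkit's own
        // implementation may differ in detail. It fills this.pixelData (Bgr32) from
        // this.rawPixelData (one Bayer byte per pixel), the two buffers used by the handler.
        private void ConvertBayerToRgb32(int width, int height)
        {
            // Walk the image in 2x2 Bayer blocks:  G R
            //                                      B G
            for (int y = 0; y < height; y += 2)
            {
                for (int x = 0; x < width; x += 2)
                {
                    int firstRowOffset  = (y * width) + x;
                    int secondRowOffset = firstRowOffset + width;

                    byte red    = this.rawPixelData[firstRowOffset + 1];
                    byte green1 = this.rawPixelData[firstRowOffset];
                    byte green2 = this.rawPixelData[secondRowOffset + 1];
                    byte blue   = this.rawPixelData[secondRowOffset];

                    // Write the four Bgr32 output pixels of this block, reusing the block's
                    // single red and blue samples (nearest neighbor).
                    int outFirst  = firstRowOffset * 4;
                    int outSecond = secondRowOffset * 4;

                    this.pixelData[outFirst]      = blue;
                    this.pixelData[outFirst + 1]  = green1;
                    this.pixelData[outFirst + 2]  = red;

                    this.pixelData[outFirst + 4]  = blue;
                    this.pixelData[outFirst + 5]  = green1;
                    this.pixelData[outFirst + 6]  = red;

                    this.pixelData[outSecond]     = blue;
                    this.pixelData[outSecond + 1] = green2;
                    this.pixelData[outSecond + 2] = red;

                    this.pixelData[outSecond + 4] = blue;
                    this.pixelData[outSecond + 5] = green2;
                    this.pixelData[outSecond + 6] = red;
                }
            }
        }
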
Example #9
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs imageFrames)
        {
            //
            //Color Frame
            //
            ColorImageFrame colorVideoFrame = imageFrames.OpenColorImageFrame();

            if (colorVideoFrame != null)
            {
                //Create array for pixel data and copy it from the image frame
                Byte[] pixelData = new Byte[colorVideoFrame.PixelDataLength];
                colorVideoFrame.CopyPixelDataTo(pixelData);

                //Convert the Kinect's BGRA pixel order to the RGBA order XNA expects
                Byte[] bgraPixelData = new Byte[colorVideoFrame.PixelDataLength];
                for (int i = 0; i < pixelData.Length; i += 4)
                {
                    bgraPixelData[i]     = pixelData[i + 2];
                    bgraPixelData[i + 1] = pixelData[i + 1];
                    bgraPixelData[i + 2] = pixelData[i];
                    bgraPixelData[i + 3] = (Byte)255; //The video comes with 0 alpha so it is transparent
                }

                // Create a texture and assign the realigned pixels
                colorVideo = new Texture2D(graphics.GraphicsDevice, colorVideoFrame.Width, colorVideoFrame.Height);
                colorVideo.SetData(bgraPixelData);

                // Release the frame so the sensor can reuse it
                colorVideoFrame.Dispose();
            }

            //
            // Skeleton Frame
            //
            using (SkeletonFrame skeletonFrame = imageFrames.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    if ((skeletonData == null) || (this.skeletonData.Length != skeletonFrame.SkeletonArrayLength))
                    {
                        this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    }

                    //Copy the skeleton data to our array
                    skeletonFrame.CopySkeletonDataTo(this.skeletonData);
                }
            }
            if (skeletonData != null)
            {
                foreach (Skeleton skel in skeletonData)
                {
                    if (skel.TrackingState == SkeletonTrackingState.Tracked)
                    {
                        skeleton = skel;
                    }
                }
            }


            DepthImageFrame depthVideoFrame = imageFrames.OpenDepthImageFrame();

            //short[] pixelData = new short[depthVideoFrame.PixelDataLength];
            if (depthVideoFrame != null)
            {
                short[] pixelData = new short[depthVideoFrame.PixelDataLength];
                depthVideoFrame.CopyPixelDataTo(pixelData);
                depthVideo = new Texture2D(graphics.GraphicsDevice, depthVideoFrame.Width, depthVideoFrame.Height);
                depthVideo.SetData(ConvertDepthFrame(pixelData, kinect.DepthStream));

                // Release the frame so the sensor can reuse it
                depthVideoFrame.Dispose();
            }
        }
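
        // Note: ConvertDepthFrame is not shown above. A minimal sketch that maps each depth
        // sample to a grayscale RGBA pixel for the XNA Texture2D; the intensity scaling and the
        // RGBA byte order are assumptions, and the >> 3 player-index shift matches the one used
        // elsewhere in these examples.
        private byte[] ConvertDepthFrame(short[] depthData, DepthImageStream depthStream)
        {
            byte[] depthColor = new byte[depthData.Length * 4];
            int range = depthStream.MaxDepth - depthStream.MinDepth;

            for (int i = 0; i < depthData.Length; i++)
            {
                // Drop the 3-bit player index to get the distance in millimetres.
                int depth = depthData[i] >> 3;

                // Closer objects appear brighter.
                int clamped    = Math.Min(Math.Max(depth - depthStream.MinDepth, 0), range);
                byte intensity = (byte)(255 - ((255 * clamped) / range));

                depthColor[(i * 4)]     = intensity; // R
                depthColor[(i * 4) + 1] = intensity; // G
                depthColor[(i * 4) + 2] = intensity; // B
                depthColor[(i * 4) + 3] = 255;       // A
            }

            return depthColor;
        }
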
Example #10
        void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            if ((playback || recordSet) && countdown < 90)
            {
                if (countdown == 0)
                {
                    Countdown.Text = "3";
                }
                else if (countdown == 30)
                {
                    Countdown.Text = "2";
                }
                else if (countdown == 60)
                {
                    Countdown.Text = "1";
                }
                countdown++;
                return;
            }
            else
            {
                Countdown.Text = "";
                if (playback)
                {
                    changeVisibility(System.Windows.Visibility.Visible);
                }
            }


            if (playback == true)
            {
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                {
                    if (colorFrame == null)
                    {
                        return;
                    }
                    playbackFrameNumber++;
                    byte[] pixels = new byte[colorFrame.PixelDataLength];
                    dataStream.Read(pixels, 0, colorFrame.PixelDataLength);
                    int stride = colorFrame.Width * 4;
                    masterView.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, pixels, stride);


                    if (totalCounted != 0)
                    {
                        String playerFeedback;
                        if (playbackFrameNumber % 30 == 0)
                        {
                            playerPercentage = Math.Round((totalCorrespondence / totalCounted) * 100);
                            if (playerPercentage < 20)
                            {
                                playerFeedback = "You're terrible";
                            }
                            else if (playerPercentage < 40)
                            {
                                playerFeedback = "You could do better";
                            }
                            else if (playerPercentage < 60)
                            {
                                playerFeedback = "Getting there";
                            }
                            else if (playerPercentage < 80)
                            {
                                playerFeedback = "Not bad";
                            }
                            else if (playerPercentage < 100)
                            {
                                playerFeedback = "Great job!";
                            }
                            else
                            {
                                playerFeedback = "Are you even real?";
                            }

                            /*
                             * else
                             * {
                             *  playerFeedback = playerPercentage.ToString() + "%";
                             * }
                             */

                            // update display
                            if (!textBlock3.Text.Equals(playerFeedback))
                            {
                                //speech.Speak(playerFeedback);
                                textBlock3.Text = playerFeedback;
                            }
                            textPercentage.Text = playerPercentage.ToString() + "%";
                        }
                    }

                    if (dataStream.Position == streamLength)
                    {
                        countdown = 0;
                        playback  = false;
                        dataStream.Close();
                        // swap image
                        BitmapImage bitmap = new BitmapImage();
                        bitmap.BeginInit();
                        bitmap.UriSource = new Uri("/VirtualSifu;component/Images/play.png", UriKind.Relative);
                        bitmap.EndInit();
                        image4.Stretch = Stretch.Fill;
                        image4.Source  = bitmap;

                        // undim record button
                        bitmap = new BitmapImage();
                        bitmap.BeginInit();
                        bitmap.UriSource = new Uri("/VirtualSifu;component/Images/record.png", UriKind.Relative);
                        bitmap.EndInit();
                        image2.Stretch = Stretch.Fill;
                        image2.Source  = bitmap;

                        masterView.Source = null;
                        changeVisibility(System.Windows.Visibility.Hidden);
                    }



                    using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
                    {
                        if (skeletonFrame != null)
                        {
                            Skeleton[] data = new Skeleton[skeletonFrame.SkeletonArrayLength];
                            skeletonFrame.CopySkeletonDataTo(data);

                            foreach (Skeleton skeleton in data)
                            {
                                if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
                                {
                                    SkeletonPoint studentPoint;
                                    foreach (String joint in jointsTracked)
                                    {
                                        studentPoint = getJoint(joint, skeleton).Position;
                                        ((ArrayList)studentData.Get(joint)).Insert(playbackFrameNumber % 30, new JointData(studentPoint.X, studentPoint.Y, studentPoint.Z));
                                    }

                                    ArrayList dtwData = new ArrayList();

                                    if (playbackFrameNumber != 0 && playbackFrameNumber % 30 == 0)
                                    {
                                        //run DTW for each joint

                                        dtwData = runDTW3();
                                        Console.Write((double)dtwData[0] + "\n");
                                        colorJoint(ankleRight, (double)dtwData[0]);
                                        colorJoint(ankleLeft, (double)dtwData[1]);
                                        colorJoint(kneeRight, (double)dtwData[2]);
                                        colorJoint(kneeLeft, (double)dtwData[3]);
                                        colorJoint(hipRight, (double)dtwData[4]);
                                        colorJoint(hipLeft, (double)dtwData[5]);
                                        colorJoint(shoulderRight, (double)dtwData[6]);
                                        colorJoint(shoulderLeft, (double)dtwData[7]);
                                        colorJoint(elbowRight, (double)dtwData[8]);
                                        colorJoint(elbowLeft, (double)dtwData[9]);
                                        colorJoint(wristRight, (double)dtwData[10]);
                                        colorJoint(wristLeft, (double)dtwData[11]);

                                        //colorJoint(ellipse, random.Next(0, 4));
                                        //Probably can do this part like Gina's
                                        //Get a joint list that you want calculated
                                        //perform runDTW on each individual joint
                                        //then do your coloring/drawing for it.
                                    }


                                    ScalePosition(wristRight, skeleton.Joints[JointType.WristRight]);
                                    ScalePosition(wristLeft, skeleton.Joints[JointType.WristLeft]);
                                    ScalePosition(elbowRight, skeleton.Joints[JointType.ElbowRight]);
                                    ScalePosition(elbowLeft, skeleton.Joints[JointType.ElbowLeft]);
                                    ScalePosition(shoulderRight, skeleton.Joints[JointType.ShoulderRight]);
                                    ScalePosition(shoulderLeft, skeleton.Joints[JointType.ShoulderLeft]);
                                    ScalePosition(ankleRight, skeleton.Joints[JointType.AnkleRight]);
                                    ScalePosition(ankleLeft, skeleton.Joints[JointType.AnkleLeft]);
                                    ScalePosition(kneeRight, skeleton.Joints[JointType.KneeRight]);
                                    ScalePosition(kneeLeft, skeleton.Joints[JointType.KneeLeft]);
                                    ScalePosition(hipRight, skeleton.Joints[JointType.HipRight]);
                                    ScalePosition(hipLeft, skeleton.Joints[JointType.HipLeft]);
                                    GetCameraPoint(skeleton, e);
                                }
                            }
                        }
                    }
                }
            }

            if (recordSet == true)
            {
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                {
                    if (colorFrame == null)
                    {
                        return;
                    }
                    frameNumber++;
                    byte[] pixels = new byte[colorFrame.PixelDataLength];
                    colorFrame.CopyPixelDataTo(pixels);

                    dataStream.Write(pixels, 0, colorFrame.PixelDataLength);

                    int stride = colorFrame.Width * 4;
                    masterView.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, pixels, stride);
                }
                using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
                {
                    if (skeletonFrame != null)
                    {
                        Console.Write(masterLeftWristData.Count);
                        Skeleton[] data = new Skeleton[skeletonFrame.SkeletonArrayLength];
                        skeletonFrame.CopySkeletonDataTo(data);

                        foreach (Skeleton skeleton in data)
                        {
                            if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
                            {
                                SkeletonPoint point = skeleton.Joints[JointType.Head].Position;


                                foreach (String joint in jointsTracked)
                                {
                                    point = getJoint(joint, skeleton).Position;
                                    writer.Write(joint + ": " + point.X + " " + point.Y + " " + point.Z + "\r\n");
                                }

                                writer.Write("\r\n");


                                //Somewhere after all this code has run, we need to finish construction of
                                //our FileStreamReader for Student [and master?]
                            }
                        }
                    }
                }
            }
        }
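
        // Note: getJoint is not shown above. A minimal sketch that resolves the joint names in
        // jointsTracked to the corresponding Joint, assuming the names match the JointType enum
        // members (e.g. "WristRight").
        private Joint getJoint(String jointName, Skeleton skeleton)
        {
            JointType type = (JointType)Enum.Parse(typeof(JointType), jointName, true);
            return skeleton.Joints[type];
        }
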
Example #11
        private void SensorColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
        {
            Hsv lowerLimit = new Hsv(40, 100, 100);
            Hsv upperLimit = new Hsv(80, 255, 255);

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    byte[] datosColor = new byte[colorFrame.PixelDataLength];

                    colorFrame.CopyPixelDataTo(datosColor);

                    System.Drawing.Bitmap bmp             = Helper.ImageToBitmap(colorFrame);
                    Image <Hsv, Byte>     currentFrameHSV = new Image <Hsv, byte>(bmp);
                    // Copy the pixel data from the image to a temporary array

                    Image <Gray, Byte> grayFrame = currentFrameHSV.Convert <Gray, Byte>();

                    Image <Gray, Byte> imageHSVDest = currentFrameHSV.InRange(lowerLimit, upperLimit);
                    //imageHSVDest.Erode(200);
                    VectorOfVectorOfPoint vectorOfPoint = Helper.FindContours(imageHSVDest);
                    //VectorOfPointF vf = new VectorOfPointF();
                    for (int i = 0; i < vectorOfPoint.Size; i++)
                    {
                        var contour = vectorOfPoint[i];
                        var area    = CvInvoke.ContourArea(contour);
                        if (area > 100)
                        {
                            System.Drawing.Rectangle rec = CvInvoke.BoundingRectangle(contour);
                            Point p1 = new Point(rec.X, rec.Y);
                            Point p2 = new Point(rec.X + rec.Width, rec.Y + rec.Height);
                            ObjetoX = (p1.X + p2.X) / 2;
                            ObjetoY = (p1.Y + p2.Y) / 2;
                            //rect = new Rect(p1, p2);

                            Console.WriteLine($"x: {ObjetoX} y: {ObjetoY}");
                            //currentFrame.Draw(rec, new Bgr(0, double.MaxValue, 0), 3);
                        }
                    }
                    colorStream.Source = BitmapSource.Create(
                        colorFrame.Width, colorFrame.Height,
                        96,
                        96,
                        PixelFormats.Bgr32,
                        null,
                        datosColor,
                        colorFrame.Width * colorFrame.BytesPerPixel
                        );


                    //colorFrame.CopyPixelDataTo(this.colorPixels);
                    //// Write the pixel data into our bitmap
                    //this.colorBitmap.WritePixels(
                    //    new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                    //    this.colorPixels,
                    //    this.colorBitmap.PixelWidth * sizeof(int),
                    //    0);
                }
            }
        }
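Helper.ImageToBitmap and Helper.FindContours are project utilities that the snippet does not include. A rough sketch of what ImageToBitmap might look like, assuming a BGR32 color stream, is given below; only the call shape comes from the code above, the body is an assumption.

        // Hypothetical sketch of Helper.ImageToBitmap: copy a Kinect BGR32 color frame
        // into a System.Drawing.Bitmap so that EmguCV can wrap it in an Image<Hsv, byte>.
        public static System.Drawing.Bitmap ImageToBitmap(ColorImageFrame frame)
        {
            byte[] pixels = new byte[frame.PixelDataLength];
            frame.CopyPixelDataTo(pixels);

            var bitmap = new System.Drawing.Bitmap(frame.Width, frame.Height,
                System.Drawing.Imaging.PixelFormat.Format32bppRgb);
            System.Drawing.Imaging.BitmapData data = bitmap.LockBits(
                new System.Drawing.Rectangle(0, 0, frame.Width, frame.Height),
                System.Drawing.Imaging.ImageLockMode.WriteOnly,
                System.Drawing.Imaging.PixelFormat.Format32bppRgb);
            System.Runtime.InteropServices.Marshal.Copy(pixels, 0, data.Scan0, pixels.Length);
            bitmap.UnlockBits(data);
            return bitmap;
        }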
Example #12
        void KinectFaceNode_AllFrameReady(object sender, AllFramesReadyEventArgs e)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            if (face == null)
            {
                face = new FaceTracker(this.runtime.Runtime);
            }

            colorImageFrame = e.OpenColorImageFrame();
            depthImageFrame = e.OpenDepthImageFrame();
            skeletonFrame   = e.OpenSkeletonFrame();

            if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
            {
                // Dispose whatever frames were opened so they are returned to the sensor
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }
                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }
                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
                return;
            }

            if (this.depthImage == null)
            {
                this.depthImage = new short[depthImageFrame.PixelDataLength];
            }

            if (this.colorImage == null)
            {
                this.colorImage = new byte[colorImageFrame.PixelDataLength];
            }

            if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
            {
                this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
            }

            colorImageFrame.CopyPixelDataTo(this.colorImage);
            depthImageFrame.CopyPixelDataTo(this.depthImage);
            skeletonFrame.CopySkeletonDataTo(this.skeletonData);

            foreach (Skeleton skeleton in this.skeletonData)
            {
                if (skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                    skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                {
                    // We want to keep a record of any skeleton, tracked or untracked.
                    if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                    {
                        this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                    }

                    // Give each tracker the updated frame.
                    SkeletonFaceTracker skeletonFaceTracker;
                    if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out skeletonFaceTracker))
                    {
                        skeletonFaceTracker.OnFrameReady(this.runtime.Runtime, ColorImageFormat.RgbResolution640x480Fps30, colorImage, DepthImageFormat.Resolution320x240Fps30, depthImage, skeleton);
                        skeletonFaceTracker.LastTrackedFrame = skeletonFrame.FrameNumber;
                    }
                }
            }

            this.RemoveOldTrackers(skeletonFrame.FrameNumber);

            colorImageFrame.Dispose();
            depthImageFrame.Dispose();
            skeletonFrame.Dispose();

            this.FInvalidate = true;
        }
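RemoveOldTrackers is called here and in the other face-tracking handlers but never shown. In the Kinect face-tracking samples it simply drops trackers whose skeleton has not been seen for a while; a hedged sketch along those lines, reusing the trackedSkeletons dictionary and LastTrackedFrame property referenced above, follows (the threshold is an assumption).

        // Hypothetical sketch: discard face trackers whose skeleton has not been updated
        // for more than MaxMissedFrames frames, releasing their native resources.
        private const uint MaxMissedFrames = 100;

        private void RemoveOldTrackers(int currentFrameNumber)
        {
            var trackersToRemove = new List<int>();

            foreach (var entry in this.trackedSkeletons)
            {
                uint missedFrames = (uint)currentFrameNumber - (uint)entry.Value.LastTrackedFrame;
                if (missedFrames > MaxMissedFrames)
                {
                    entry.Value.Dispose();
                    trackersToRemove.Add(entry.Key);
                }
            }

            foreach (int trackingId in trackersToRemove)
            {
                this.trackedSkeletons.Remove(trackingId);
            }
        }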
Example #13
        private void Sensor_AllFrameReady(object sender, AllFramesReadyEventArgs e)
        {
            //ColorImageFrame
            ColorImageFrame colorImageFrame = e.OpenColorImageFrame();

            if (colorImageFrame == null)
            {
                return;
            }
            byte[] colorDataArray = new byte[colorImageFrame.PixelDataLength];
            colorImageFrame.CopyPixelDataTo(colorDataArray);
            // Capture the format before disposing; the frame must not be touched afterwards
            ColorImageFormat colorFormat = colorImageFrame.Format;
            colorImageFrame.Dispose();
            //DepthImageFrame
            DepthImageFrame depthImageFrame = e.OpenDepthImageFrame();

            if (depthImageFrame == null)
            {
                return;
            }
            //(PixelDataLength = 640*480)
            DepthImagePixel[] depthImgPixArray = new DepthImagePixel[depthImageFrame.PixelDataLength];
            depthImageFrame.CopyDepthImagePixelDataTo(depthImgPixArray);
            // Capture the format before disposing; the frame must not be touched afterwards
            DepthImageFormat depthFormat = depthImageFrame.Format;
            depthImageFrame.Dispose();
            byte[]            userColorArray = new byte[_sensor.ColorStream.FramePixelDataLength];
            ColorImagePoint[] colImgPntArray
                = new ColorImagePoint[_sensor.DepthStream.FrameHeight *
                                      _sensor.DepthStream.FrameWidth];
            // Map each depth pixel to its corresponding point in the color image
            _sensor.CoordinateMapper.MapDepthFrameToColorFrame(depthFormat,
                                                               depthImgPixArray, colorFormat, colImgPntArray);
            for (int i = 0; i < depthImgPixArray.Length; i++)
            {
                if (depthImgPixArray[i].PlayerIndex == 0)
                {
                    continue;
                }
                ColorImagePoint colorImagePoint = colImgPntArray[i];
                if (colorImagePoint.X >= _sensor.ColorStream.FrameWidth || colorImagePoint.X < 0 ||
                    colorImagePoint.Y >= _sensor.ColorStream.FrameHeight || colorImagePoint.Y < 0)
                {
                    continue;
                }
                int colorDataIndex =
                    ((colorImagePoint.Y * _sensor.ColorStream.FrameWidth) + colorImagePoint.X)
                    * _sensor.ColorStream.FrameBytesPerPixel;
                userColorArray[colorDataIndex]     = colorDataArray[colorDataIndex];
                userColorArray[colorDataIndex + 1] = colorDataArray[colorDataIndex + 1];
                userColorArray[colorDataIndex + 2] = colorDataArray[colorDataIndex + 2];
                userColorArray[colorDataIndex + 3] = 255;
                userColorArray[colorDataIndex - _sensor.ColorStream.FrameBytesPerPixel]
                    = colorDataArray[colorDataIndex - _sensor.ColorStream.FrameBytesPerPixel];
                userColorArray[colorDataIndex - _sensor.ColorStream.FrameBytesPerPixel + 1]
                    = colorDataArray[colorDataIndex - _sensor.ColorStream.FrameBytesPerPixel + 1];
                userColorArray[colorDataIndex - _sensor.ColorStream.FrameBytesPerPixel + 2]
                    = colorDataArray[colorDataIndex - _sensor.ColorStream.FrameBytesPerPixel + 2];
                userColorArray[colorDataIndex - _sensor.ColorStream.FrameBytesPerPixel + 3] = 255;
            }
            BitmapSource bitmapSource = BitmapSource.Create(
                _sensor.ColorStream.FrameWidth,
                _sensor.ColorStream.FrameHeight,
                96,                  //dpi
                96,                  //dpi
                PixelFormats.Bgra32, //
                null,
                userColorArray,
                _sensor.ColorStream.FrameWidth *
                _sensor.ColorStream.FrameBytesPerPixel);

            image1.Source = bitmapSource;
        }
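All of these handlers assume the color, depth, and skeleton streams are enabled and the handler is attached before the sensor is started. A minimal setup sketch is shown below; the chosen formats are assumptions and only need to match whatever buffer sizes the handler expects.

        // Minimal sketch of the sensor setup these handlers rely on (Kinect SDK 1.x).
        private void StartSensor()
        {
            KinectSensor sensor = KinectSensor.KinectSensors
                .FirstOrDefault(s => s.Status == KinectStatus.Connected);
            if (sensor == null)
            {
                return;
            }

            sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
            sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
            sensor.SkeletonStream.Enable();   // required for the PlayerIndex used above

            sensor.AllFramesReady += Sensor_AllFrameReady;
            sensor.Start();
        }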
Example #14
        /// <summary>
        /// Event handler for the Kinect sensor's AllFramesReady event
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // in the middle of shutting down, so nothing to do
            if (null == this.sensor)
            {
                return;
            }

            bool depthReceived = false;
            bool colorReceived = false;

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (null != depthFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);

                    depthReceived = true;
                }
            }

            Skeleton[] skeletons = new Skeleton[0];

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    skeletonFrame.CopySkeletonDataTo(skeletons);
                }
            }

            using (DrawingContext dc = this.drawingGroup.Open())
            {
                // Draw a transparent background to set the render size
                dc.DrawRectangle(Brushes.Transparent, null, new Rect(0.0, 0.0, RenderWidth, RenderHeight));

                if (skeletons.Length != 0)
                {
                    foreach (Skeleton skel in skeletons)
                    {
                        if (skel.TrackingState == SkeletonTrackingState.Tracked)
                        {
                            this.seleccion_sonido(skel);
                        }
                        else if (skel.TrackingState == SkeletonTrackingState.PositionOnly)
                        {
                            dc.DrawEllipse(
                                this.centerPointBrush,
                                null,
                                this.SkeletonPointToScreen(skel.Position),
                                BodyCenterThickness,
                                BodyCenterThickness);
                        }
                    }
                }

                // prevent drawing outside of our render area
                this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, RenderWidth, RenderHeight));
            }

            // Call the function that drives the sequence of states we need to go through
            detectar_estado(skeletons);

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (null != colorFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    colorFrame.CopyPixelDataTo(this.colorPixels);

                    colorReceived = true;
                }
            }

            // do our processing outside of the using block
            // so that we return resources to the kinect as soon as possible
            if (true == depthReceived)
            {
                this.sensor.CoordinateMapper.MapDepthFrameToColorFrame(
                    DepthFormat,
                    this.depthPixels,
                    ColorFormat,
                    this.colorCoordinates);

                Array.Clear(this.playerPixelData, 0, this.playerPixelData.Length);

                // loop over each row and column of the depth
                for (int y = 0; y < this.depthHeight; ++y)
                {
                    for (int x = 0; x < this.depthWidth; ++x)
                    {
                        // calculate index into depth array
                        int depthIndex = x + (y * this.depthWidth);

                        DepthImagePixel depthPixel = this.depthPixels[depthIndex];

                        int player = depthPixel.PlayerIndex;

                        // if we're tracking a player for the current pixel, set its opacity to full
                        if (player > 0)
                        {
                            // retrieve the depth to color mapping for the current depth pixel
                            ColorImagePoint colorImagePoint = this.colorCoordinates[depthIndex];

                            // scale color coordinates to depth resolution
                            int colorInDepthX = colorImagePoint.X / this.colorToDepthDivisor;
                            int colorInDepthY = colorImagePoint.Y / this.colorToDepthDivisor;

                            // make sure the depth pixel maps to a valid point in color space
                            // check y >= 0 and y < depthHeight to make sure we don't write outside of the array
                            // check x > 0 instead of >= 0 since to fill gaps we set opaque current pixel plus the one to the left
                            // because of how the sensor works it is more correct to do it this way than to set to the right
                            if (colorInDepthX > 0 && colorInDepthX < this.depthWidth && colorInDepthY >= 0 && colorInDepthY < this.depthHeight)
                            {
                                // calculate index into the player mask pixel array
                                int playerPixelIndex = colorInDepthX + (colorInDepthY * this.depthWidth);

                                // set opaque
                                this.playerPixelData[playerPixelIndex] = opaquePixelValue;

                                // compensate for depth/color not corresponding exactly by setting the pixel
                                // to the left to opaque as well
                                this.playerPixelData[playerPixelIndex - 1] = opaquePixelValue;
                            }
                        }
                    }
                }
            }

            // do our processing outside of the using block
            // so that we return resources to the kinect as soon as possible
            if (true == colorReceived)
            {
                // Write the pixel data into our bitmap
                this.colorBitmap.WritePixels(
                    new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                    this.colorPixels,
                    this.colorBitmap.PixelWidth * sizeof(int),
                    0);

                if (this.playerOpacityMaskImage == null)
                {
                    this.playerOpacityMaskImage = new WriteableBitmap(
                        this.depthWidth,
                        this.depthHeight,
                        96,
                        96,
                        PixelFormats.Bgra32,
                        null);

                    MaskedColor.OpacityMask = new ImageBrush {
                        ImageSource = this.playerOpacityMaskImage
                    };
                }

                this.playerOpacityMaskImage.WritePixels(
                    new Int32Rect(0, 0, this.depthWidth, this.depthHeight),
                    this.playerPixelData,
                    this.depthWidth * ((this.playerOpacityMaskImage.Format.BitsPerPixel + 7) / 8),
                    0);
            }
        }
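seleccion_sonido, detectar_estado, and SkeletonPointToScreen belong to the surrounding class and are not shown. In the SDK skeleton samples, SkeletonPointToScreen simply projects the skeleton point into depth space; a sketch under that assumption:

        // Hypothetical sketch: project a SkeletonPoint into 2D render coordinates via the
        // depth image. Assumes a 640x480 depth format and a render area of the same size.
        private Point SkeletonPointToScreen(SkeletonPoint skelPoint)
        {
            DepthImagePoint depthPoint = this.sensor.CoordinateMapper.MapSkeletonPointToDepthPoint(
                skelPoint, DepthImageFormat.Resolution640x480Fps30);
            return new Point(depthPoint.X, depthPoint.Y);
        }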
Example #15
        void KinectFaceNode_AllFrameReady(object sender, AllFramesReadyEventArgs e)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            colorImageFrame = e.OpenColorImageFrame();
            depthImageFrame = e.OpenDepthImageFrame();
            skeletonFrame   = e.OpenSkeletonFrame();

            if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }
                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }
                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
                return;
            }

            if (first)
            {
                first         = false;
                this.olddepth = depthImageFrame.Format;
            }
            else
            {
                if (this.olddepth != depthImageFrame.Format)
                {
                    //Need a reset
                    if (this.depthImage != null)
                    {
                        this.depthImage = null;
                    }

                    foreach (SkeletonFaceTracker sft in this.trackedSkeletons.Values)
                    {
                        sft.Dispose();
                    }

                    this.trackedSkeletons.Clear();
                    this.olddepth = depthImageFrame.Format;
                }
            }

            if (this.depthImage == null)
            {
                this.depthImage = new short[depthImageFrame.PixelDataLength];
            }

            if (this.colorImage == null)
            {
                this.colorImage = new byte[colorImageFrame.PixelDataLength];
            }

            if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
            {
                this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
            }

            colorImageFrame.CopyPixelDataTo(this.colorImage);
            depthImageFrame.CopyPixelDataTo(this.depthImage);
            skeletonFrame.CopySkeletonDataTo(this.skeletonData);

            foreach (Skeleton skeleton in this.skeletonData)
            {
                if (skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                    skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                {
                    // We want to keep a record of any skeleton, tracked or untracked.
                    if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                    {
                        this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                    }

                    // Give each tracker the updated frame.
                    SkeletonFaceTracker skeletonFaceTracker;
                    if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out skeletonFaceTracker))
                    {
                        skeletonFaceTracker.OnFrameReady(this.runtime.Runtime, colorImageFrame.Format, colorImage, depthImageFrame.Format, depthImage, skeleton);
                        skeletonFaceTracker.LastTrackedFrame = skeletonFrame.FrameNumber;
                    }
                }
            }

            this.RemoveOldTrackers(skeletonFrame.FrameNumber);

            colorImageFrame.Dispose();
            depthImageFrame.Dispose();
            skeletonFrame.Dispose();

            this.FInvalidate = true;
        }
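SkeletonFaceTracker is a small helper class shared by these face-tracking handlers; it owns one FaceTracker per skeleton. A hedged sketch of the members the handlers rely on is given below; what is done with the tracked face is application-specific and omitted.

        // Hypothetical sketch of the SkeletonFaceTracker helper used above.
        private class SkeletonFaceTracker : IDisposable
        {
            private FaceTracker faceTracker;

            public int LastTrackedFrame { get; set; }

            public void OnFrameReady(KinectSensor sensor, ColorImageFormat colorFormat, byte[] colorImage,
                                     DepthImageFormat depthFormat, short[] depthImage, Skeleton skeleton)
            {
                if (this.faceTracker == null)
                {
                    this.faceTracker = new FaceTracker(sensor);
                }

                FaceTrackFrame frame = this.faceTracker.Track(
                    colorFormat, colorImage, depthFormat, depthImage, skeleton);

                if (frame.TrackSuccessful)
                {
                    // Consume the tracked face here, e.g. frame.GetAnimationUnitCoefficients().
                }
            }

            public void Dispose()
            {
                if (this.faceTracker != null)
                {
                    this.faceTracker.Dispose();
                    this.faceTracker = null;
                }
            }
        }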
Example #16
        // runs every time a new frame is ready
        void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // draw the RGB camera image
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame()) {
                if (colorFrame == null)
                {
                    return;
                }

                byte[] colorData = new byte[colorFrame.PixelDataLength];
                colorFrame.CopyPixelDataTo(colorData);

                kinectVideo.Source = BitmapSource.Create(colorFrame.Width,
                                                         colorFrame.Height,
                                                         96,
                                                         96,
                                                         PixelFormats.Bgr32,
                                                         null,
                                                         colorData,
                                                         colorFrame.Width * colorFrame.BytesPerPixel);
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame()) {
                // exit if there is no frame
                if (skeletonFrame == null)
                {
                    return;
                }

                // if the skeleton array size changed, reallocate it
                if (skeletons == null ||
                    skeletons.Length != skeletonFrame.SkeletonArrayLength)
                {
                    skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }
                // copy the skeletons into the array
                skeletonFrame.CopySkeletonDataTo(skeletons);

                // select the closest skeleton
                Skeleton closestSkeleton = (from s in skeletons
                                            where s.TrackingState == SkeletonTrackingState.Tracked &&
                                            s.Joints[JointType.Head].TrackingState == JointTrackingState.Tracked
                                            select s).OrderBy(s => s.Joints[JointType.Head].Position.Z)
                                           .FirstOrDefault();
                // exit if there is no skeleton nearby
                if (closestSkeleton == null)
                {
                    return;
                }

                // assign the joints to variables for easier identification
                Joint head      = closestSkeleton.Joints[JointType.Head];
                Joint rightHand = closestSkeleton.Joints[JointType.HandRight];
                Joint leftHand  = closestSkeleton.Joints[JointType.HandLeft];

                // exit if the three joints are not tracked reliably
                if (head.TrackingState != JointTrackingState.Tracked ||
                    rightHand.TrackingState != JointTrackingState.Tracked ||
                    leftHand.TrackingState != JointTrackingState.Tracked)
                {
                    //Don't have a good read on the joints so we cannot process gestures
                    return;
                }

                // update the ellipse positions
                SetEllipsePosition(ellipseHead, head, false);
                SetEllipsePosition(ellipseLeftHand, leftHand, isBackGestureActive);
                SetEllipsePosition(ellipseRightHand, rightHand, isForwardGestureActive);

                // check whether the gesture is activated or not
                ProcessForwardBackGesture(head, rightHand, leftHand);
            }
        }
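SetEllipsePosition and ProcessForwardBackGesture are application helpers that are not part of this listing. A minimal sketch of SetEllipsePosition, assuming the ellipses sit on a WPF Canvas, a sensor field for the KinectSensor, and a 640x480 color format, could be:

        // Hypothetical sketch: place an ellipse over a joint, changing its colour when the
        // associated gesture is active. The field name and brush choices are assumptions.
        private void SetEllipsePosition(Ellipse ellipse, Joint joint, bool isHighlighted)
        {
            ColorImagePoint point = sensor.CoordinateMapper.MapSkeletonPointToColorPoint(
                joint.Position, ColorImageFormat.RgbResolution640x480Fps30);

            ellipse.Fill = isHighlighted ? Brushes.Red : Brushes.Green;

            Canvas.SetLeft(ellipse, point.X - ellipse.Width / 2);
            Canvas.SetTop(ellipse, point.Y - ellipse.Height / 2);
        }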
Example #17
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information
                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                // Update the list of trackers and the trackers with the current frame information
                var i = 0;

                foreach (Skeleton skeleton in this.skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                        skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // We want to keep a record of any skeleton, tracked or untracked.
                        if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                        {
                            this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                        }

                        var position = skeleton.Position;
                        if (position.X != 0 && position.Y != 0 && position.Z != 0)
                        {
                            Console.WriteLine($"Face {i}: X {position.X}, Y {position.Y}, Z {position.Z}");
                            string text = position.X + "\r\n" + position.Y + "\r\n" + position.Z;
                            System.IO.File.WriteAllText(@"C:\Users\Mykayla\Desktop\transforms\coords_" + file_index + ".txt", text);
                            file_index++;
                        }
                        // Give each tracker the updated frame.
                        SkeletonFaceTracker skeletonFaceTracker;
                        if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out skeletonFaceTracker))
                        {
                            skeletonFaceTracker.OnFrameReady(this.Kinect, colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);
                            skeletonFaceTracker.LastTrackedFrame = skeletonFrame.FrameNumber;
                        }
                    }
                    i += 1;
                }

                this.RemoveOldTrackers(skeletonFrame.FrameNumber);

                this.InvalidateVisual();
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
        private void AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                    using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
                    {
                        if (colorFrame == null || depthFrame == null || skeletonFrame == null)
                        {
                            ErrorWhileRecivingData(skeletonFrame, depthFrame, colorFrame);
                            return;
                        }
                        Skeleton[] skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                        skeletonFrame.CopySkeletonDataTo(skeletons);
                        short[,] depths = new short[depthFrame.Width, depthFrame.Height];
                        short[] fromDepthFrame = new short[depthFrame.Width * depthFrame.Height];
                        depthFrame.CopyPixelDataTo(fromDepthFrame);
                        DebuggingTable.LatestCreated["size"] = fromDepthFrame.Length.ToString();
                        for (int i = 0; i < fromDepthFrame.Length; i++)
                        {
                            // fromDepthFrame is row-major (i = y * Width + x), so index the 2D array as [x, y]
                            depths[i % depthFrame.Width, i / depthFrame.Width] = fromDepthFrame[i];
                        }
                        byte[] colorPixels = new byte[colorFrame.PixelDataLength];
                        colorFrame.CopyPixelDataTo(colorPixels);

                        /*
                         * //background removal
                         * if (BackgroundRemoval)
                         * {
                         *  Skeleton skeleton = skeletons.FirstOrDefault();
                         *  int maxDepth = -1, minDepth = -1;
                         *  if (skeleton != default(Skeleton))
                         *  {
                         *      short max = skeleton.Joints.Max(
                         *          joint => depths[(int)(depthFrame.Width * joint.Position.X),
                         *          (int)(depthFrame.Height * joint.Position.Y)]);
                         *      short min = skeleton.Joints.Max(
                         *          joint => depths[(int)(depthFrame.Width * joint.Position.X),
                         *          (int)(depthFrame.Height * joint.Position.Y)]);
                         *      int diff = max - min;
                         *      maxDepth = max + diff;
                         *      minDepth = min - diff;
                         *  }
                         *  for (int x = 0; x < colorFrame.Width; x++)
                         *  {
                         *      for (int y = 0; y < colorFrame.Height; y++)
                         *      {
                         *          bool isProbablyPerson;
                         *          if (skeleton == default(Skeleton))
                         *          {
                         *              isProbablyPerson = false;
                         *          }
                         *          else
                         *          {
                         *              short depth = depths[x, y];
                         *              isProbablyPerson = depth > minDepth && depth < maxDepth;
                         *          }
                         *          colorPixels[(y * colorFrame.Width + x) * 4] = (byte)(isProbablyPerson ? 255 : 0);
                         *      }
                         *  }*/
                        DataRecieved(skeletons, depths, colorPixels);
                    }
        }
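ErrorWhileRecivingData, DebuggingTable, and DataRecieved are project-specific types not included in the snippet. Since the frames are opened inside using blocks, the error path mostly needs to report what was missing; a hedged sketch of such a helper (keeping the original method name) might be:

        // Hypothetical sketch: log which of the three frames was missing this tick.
        // The using blocks in the handler above already dispose whatever was opened.
        private void ErrorWhileRecivingData(SkeletonFrame skeletonFrame, DepthImageFrame depthFrame, ColorImageFrame colorFrame)
        {
            Console.WriteLine(
                "Dropped frame - skeleton: {0}, depth: {1}, color: {2}",
                skeletonFrame != null, depthFrame != null, colorFrame != null);
        }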
Example #19
        private void run(object sender, AllFramesReadyEventArgs e)
        {
            //if (!takeFrame)
            using (ColorImageFrame frame_colour = e.OpenColorImageFrame())
                using (DepthImageFrame frame_depth = e.OpenDepthImageFrame())
                    using (SkeletonFrame frame_skel = e.OpenSkeletonFrame())
                    {
                        frames++;
                        if (frames % gap != 0)
                        {
                            //try { e.OpenColorImageFrame().Dispose(); }
                            //catch (Exception noFrameException) { };
                            takeFrame = true;
                            return;
                        }
                        else
                        {
                            takeFrame = false;
                        }

                        if (null != frame_colour)
                        {
                            byte[] rawColorImage = new byte[frame_colour.PixelDataLength];
                            frame_colour.CopyPixelDataTo(rawColorImage);
                            ProcessFrame(rawColorImage);

                            if (null != frame_depth)
                            {
                                DepthImagePixel[] depthImage = new DepthImagePixel[frame_depth.PixelDataLength];
                                frame_depth.CopyDepthImagePixelDataTo(depthImage);
                                int newX = Math.Max(x - 150, 0);
                                depth             = depthImage[x + (y * 640)].Depth;
                                lbl_depth.Content = depth;

                                /*
                                 * //double x_pos = Math.Round((2.2 * 2 * Math.Tan(57) * (this.x - 320))/ 640,    4);
                                 * double x_pos = 0.00425 * (this.x - 320);
                                 *
                                 * double y_pos = Math.Round((2.2 * 2 * Math.Tan(21.5) * (this.y - 240) * -1) / 480, 4);
                                 * double y_pos2 = (y_pos * -1) -0.45;
                                 * // +1.05
                                 * //Console.WriteLine("Depth: " + depth + ", " +
                                 * //label3.Content = "X/Y = " + x_pos + ", " + y_pos + " (" + y_pos2 + ")";
                                 *
                                 * double actual_y_skel = (y_pos2) * 1000; // -1.6
                                 * this.y_actual = actual_y_skel;
                                 * this.x_actual = x_pos;
                                 * //double actual_x_skel = (x_pos - 2.2) * 1000;
                                 */
                                //double x_pos = Math.Round((2.2 * 2 * Math.Tan(57) * tempX) / 640, 4);
                                double x_pos = 0.00425 * (this.x - 320);

                                double y_pos = Math.Round((2.2 * 2 * Math.Tan(21.5) * (this.y - 240) * -1) / 480, 4);
                                //double y_pos2 = (y_pos * -1) + 1.05;
                                double y_pos2 = (y_pos * -1) - 0.155;
                                this.x_actual = x_pos;
                                this.y_actual = y_pos2;
                            }
                        }

                        processSkeleton(frame_skel);


                        //ProcessFrame(MaskedRGB);
                    }
        }
Example #20
        private void _sensor_TakeSnapshot(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }
                if (!takeSnap)
                {
                    return;
                }
                takeSnap = false;

                byte[] pixels = new byte[colorFrame.PixelDataLength];
                colorFrame.CopyPixelDataTo(pixels);

                BitmapSource image;

                if (irCam)
                {
                    int stride = colorFrame.Width * 2;                                                                                           //IR
                    image         = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Gray16, null, pixels, stride); //IR
                    image2.Source = image;
                }
                else
                {
                    int stride = colorFrame.Width * 4;                                                                                          // RGB
                    image         = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, pixels, stride); //RGB
                    image2.Source = image;
                }



                int index = 0;
                label2.Content      = pixels.Length;
                label_blue.Content  = pixels[index + 0];
                label_green.Content = pixels[index + 1];
                label_red.Content   = pixels[index + 2];
                label_empty.Content = pixels[index + 3];

                label_Width.Content  = colorFrame.Width;
                label_height.Content = colorFrame.Height;

                String fName = string.Format("{0:HH:mm:ss tt}", DateTime.Now);

                String cfName = "";
                for (int i = 0; i < 8; i++)
                {
                    if (fName[i] != ':' && fName[i] != ' ')
                    {
                        cfName += fName[i];
                    }
                }

                label2.Content = cfName + ".txt";

                System.IO.StreamWriter file = new System.IO.StreamWriter(cfName + ".txt", true);
                file.Write(colorFrame.Width.ToString() + "#" + colorFrame.Height.ToString() + "#");
                for (int i = 0; i < pixels.Length; i++)
                {
                    if (i % 4 == 3)
                    {
                        continue;
                    }
                    string s = pixels[i].ToString();
                    file.Write(s + '$');
                }

                file.Close();

                FileStream       stream      = new FileStream(cfName + ".png", FileMode.Create);
                PngBitmapEncoder encoder     = new PngBitmapEncoder();
                TextBlock        myTextBlock = new TextBlock();
                myTextBlock.Text  = "Codec Author is: " + encoder.CodecInfo.Author.ToString();
                encoder.Interlace = PngInterlaceOption.On;
                encoder.Frames.Add(BitmapFrame.Create(image));
                encoder.Save(stream);

                stream.Close();

                //Image image = Image.FromStream(new MemoryStream(pixels));

                /*
                 * var fs = new BinaryWriter(new FileStream(@"C:\\tmp\\" + filename + ".ico", FileMode.Append, FileAccess.Write));
                 * fs.Write(imageByteArray);
                 * fs.Close();
                 */
                /*
                 * System.IO.MemoryStream ms = new System.IO.MemoryStream();
                 * image2.Save(ms, System.Drawing.Imaging.ImageFormat.Jpeg);
                 * byte[] ar = new byte[ms.Length];
                 * ms.Write(ar, 0, ar.Length);
                 *
                 */
            }


            _sensor.AllFramesReady += new EventHandler <AllFramesReadyEventArgs>(_sensor_TakeDepthshot);
        }
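Besides the PNG, the handler above writes a plain-text dump of the frame: width and height separated by '#', then the blue, green, and red byte of every pixel separated by '$' (the alpha byte is skipped). A small reader for that format, sketched under those assumptions, could be:

        // Hypothetical sketch: parse the "width#height#b$g$r$..." dump written by the
        // snapshot handler above back into its dimensions and BGR byte values.
        private static void ReadSnapshotDump(string path, out int width, out int height, out byte[] bgr)
        {
            string text = System.IO.File.ReadAllText(path);
            string[] parts = text.Split('#');

            width  = int.Parse(parts[0]);
            height = int.Parse(parts[1]);

            string[] values = parts[2].Split(new[] { '$' }, StringSplitOptions.RemoveEmptyEntries);
            bgr = new byte[values.Length];
            for (int i = 0; i < values.Length; i++)
            {
                bgr[i] = byte.Parse(values[i]);
            }
        }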
Example #21
        /// <summary>
        /// Frame-ready event for the RGB camera, depth camera, and skeleton streams
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            try {
                // Get the Kinect sensor instance
                KinectSensor kinect = sender as KinectSensor;
                if (kinect == null)
                {
                    return;
                }

                // Get the frame data from the RGB camera
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame()) {
                    if (colorFrame != null)
                    {
                        // Get the pixel data from the RGB camera
                        byte[] colorPixel = new byte[colorFrame.PixelDataLength];
                        colorFrame.CopyPixelDataTo(colorPixel);

                        // Convert the pixel data to a bitmap
                        Bitmap bitmap = new Bitmap(kinect.ColorStream.FrameWidth, kinect.ColorStream.FrameHeight,
                                                   System.Drawing.Imaging.PixelFormat.Format32bppRgb);

                        Rectangle  rect = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
                        BitmapData data = bitmap.LockBits(rect, ImageLockMode.WriteOnly,
                                                          PixelFormat.Format32bppRgb);
                        Marshal.Copy(colorPixel, 0, data.Scan0, colorPixel.Length);
                        bitmap.UnlockBits(data);

                        pictureBoxRgb.Image = bitmap;
                    }
                }

                // Create a 32-bit bitmap to write into
                // (16bpp grayscale cannot be displayed directly)
                // Get the frame data from the depth camera
                using (DepthImageFrame depthFrame = e.OpenDepthImageFrame()) {
                    if (depthFrame != null)
                    {
                        // Convert the depth data to an image and display it
                        Bitmap bitmap = new Bitmap(kinect.DepthStream.FrameWidth, kinect.DepthStream.FrameHeight,
                                                   System.Drawing.Imaging.PixelFormat.Format32bppRgb);

                        Rectangle  rect = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
                        BitmapData data = bitmap.LockBits(rect, ImageLockMode.WriteOnly,
                                                          System.Drawing.Imaging.PixelFormat.Format32bppRgb);
                        byte[] gray = ConvertDepthColor(kinect, depthFrame);
                        Marshal.Copy(gray, 0, data.Scan0, gray.Length);
                        bitmap.UnlockBits(data);

                        pictureBoxDepth.Image = bitmap;
                    }
                }

                // Get the skeleton frame
                using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame()) {
                    if (skeletonFrame != null)
                    {
                        Graphics g = Graphics.FromImage(pictureBoxRgb.Image);

                        // Get the skeleton data
                        Skeleton[] skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                        skeletonFrame.CopySkeletonDataTo(skeletons);

                        // Draw the joints of tracked skeletons
                        foreach (var skeleton in skeletons)
                        {
                            // If the skeleton is fully tracked (default mode), draw its joints
                            if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
                            {
                                // Draw each joint
                                foreach (Joint joint in skeleton.Joints)
                                {
                                    // Skip joints that are not being tracked
                                    if (joint.TrackingState != JointTrackingState.Tracked)
                                    {
                                        continue;
                                    }

                                    // Convert the skeleton coordinates to RGB camera coordinates and draw a circle
                                    DrawEllipse(kinect, g, joint.Position);
                                }
                            }
                            // If the skeleton is position-only (near mode), draw the skeleton position (center hip)
                            else if (skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                            {
                                // Convert the skeleton coordinates to RGB camera coordinates and draw a circle
                                DrawEllipse(kinect, g, skeleton.Position);
                            }
                        }
                    }
                }
            }
            catch (Exception ex) {
                MessageBox.Show(ex.Message);
            }
        }
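ConvertDepthColor and DrawEllipse are helpers of the surrounding form and are not included. A hedged sketch of ConvertDepthColor, assuming it grey-scales each depth value into the 32bpp buffer consumed by Marshal.Copy above, is shown below; the 8 m scaling range is an assumption.

        // Hypothetical sketch: turn a depth frame into a 32bpp grayscale BGR buffer.
        private byte[] ConvertDepthColor(KinectSensor kinect, DepthImageFrame depthFrame)
        {
            short[] depthPixels = new short[depthFrame.PixelDataLength];
            depthFrame.CopyPixelDataTo(depthPixels);

            byte[] buffer = new byte[depthFrame.PixelDataLength * 4];
            for (int i = 0; i < depthPixels.Length; i++)
            {
                // Drop the player-index bits to get the distance in millimetres
                int distance = depthPixels[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;
                byte gray = (byte)(255 - (255 * Math.Min(distance, 8000) / 8000));

                buffer[i * 4 + 0] = gray; // blue
                buffer[i * 4 + 1] = gray; // green
                buffer[i * 4 + 2] = gray; // red
            }

            return buffer;
        }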
Example #22
        public void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }


                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }


                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }


                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);


                foreach (Skeleton skeleton in this.skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                        skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                        {
                            this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                        }


                        SkeletonFaceTracker skeletonFaceTracker;
                        if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out skeletonFaceTracker))
                        {
                            skeletonFaceTracker.OnFrameReady(this.Kinect, colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);
                            skeletonFaceTracker.LastTrackedFrame = skeletonFrame.FrameNumber;
                        }
                    }
                }

                this.RemoveOldTrackers(skeletonFrame.FrameNumber);

                this.InvalidateVisual();
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Example #23
        private void AllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for changes in any of the data this function is receiving
                // and reset things appropriately.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.DestroyFaceTracker();
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.DestroyFaceTracker();
                    this.colorImage               = null;
                    this.colorImageFormat         = colorImageFrame.Format;
                    this.colorImageWritableBitmap = null;
                    this.ColorImage.Source        = null;
                    this.theMaterial.Brush        = null;
                }

                if (this.skeletonData != null && this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = null;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.colorImageWritableBitmap == null)
                {
                    this.colorImageWritableBitmap = new WriteableBitmap(
                        colorImageFrame.Width, colorImageFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                    this.ColorImage.Source = this.colorImageWritableBitmap;
                    this.theMaterial.Brush = new ImageBrush(this.colorImageWritableBitmap)
                    {
                        ViewportUnits = BrushMappingMode.Absolute
                    };
                }

                if (this.skeletonData == null)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                // Copy data received in this event to our buffers.
                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);
                this.colorImageWritableBitmap.WritePixels(
                    new Int32Rect(0, 0, colorImageFrame.Width, colorImageFrame.Height),
                    this.colorImage,
                    colorImageFrame.Width * Bgr32BytesPerPixel,
                    0);

                // Find a skeleton to track.
                // First see if our old one is good.
                // When a skeleton is in PositionOnly tracking state, don't pick a new one
                // as it may become fully tracked again.
                Skeleton skeletonOfInterest =
                    this.skeletonData.FirstOrDefault(
                        skeleton =>
                        skeleton.TrackingId == this.trackingId &&
                        skeleton.TrackingState != SkeletonTrackingState.NotTracked);

                if (skeletonOfInterest == null)
                {
                    // Old one wasn't around.  Find any skeleton that is being tracked and use it.
                    skeletonOfInterest =
                        this.skeletonData.FirstOrDefault(
                            skeleton => skeleton.TrackingState == SkeletonTrackingState.Tracked);

                    if (skeletonOfInterest != null)
                    {
                        // This may be a different person so reset the tracker which
                        // could have tuned itself to the previous person.
                        if (this.faceTracker != null)
                        {
                            this.faceTracker.ResetTracking();
                        }

                        this.trackingId = skeletonOfInterest.TrackingId;
                    }
                }

                bool displayFaceMesh = false;

                if (skeletonOfInterest != null && skeletonOfInterest.TrackingState == SkeletonTrackingState.Tracked)
                {
                    if (this.faceTracker == null)
                    {
                        try
                        {
                            this.faceTracker = new FaceTracker(this.Kinect);
                        }
                        catch (InvalidOperationException)
                        {
                            // During some shutdown scenarios the FaceTracker
                            // is unable to be instantiated.  Catch that exception
                            // and don't track a face.
                            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                            this.faceTracker = null;
                        }
                    }

                    if (this.faceTracker != null)
                    {
                        FaceTrackFrame faceTrackFrame = this.faceTracker.Track(
                            this.colorImageFormat,
                            this.colorImage,
                            this.depthImageFormat,
                            this.depthImage,
                            skeletonOfInterest);

                        if (faceTrackFrame.TrackSuccessful)
                        {
                            this.UpdateMesh(faceTrackFrame);

                            // Only display the face mesh if there was a successful track.
                            displayFaceMesh = true;
                        }
                    }
                }
                else
                {
                    this.trackingId = -1;
                }

                this.viewport3d.Visibility = displayFaceMesh ? Visibility.Visible : Visibility.Hidden;
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
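UpdateMesh is where this example pushes the tracked face into the 3D viewport; its body is not shown. A rough sketch, assuming theGeometry is a MeshGeometry3D defined in the XAML and ignoring the texture-coordinate handling of the full sample, might be:

        // Hypothetical sketch: rebuild the WPF mesh from a successful face-tracking frame.
        private void UpdateMesh(FaceTrackFrame faceTrackingFrame)
        {
            EnumIndexableCollection<FeaturePoint, Vector3DF> shapePoints = faceTrackingFrame.Get3DShape();
            FaceTriangle[] triangles = faceTrackingFrame.GetTriangles();

            this.theGeometry.Positions.Clear();
            foreach (Vector3DF point in shapePoints)
            {
                this.theGeometry.Positions.Add(new Point3D(point.X, point.Y, -point.Z));
            }

            this.theGeometry.TriangleIndices.Clear();
            foreach (FaceTriangle triangle in triangles)
            {
                this.theGeometry.TriangleIndices.Add(triangle.First);
                this.theGeometry.TriangleIndices.Add(triangle.Second);
                this.theGeometry.TriangleIndices.Add(triangle.Third);
            }
        }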
Example #24
        private void OnAllFrameReady(object sender, AllFramesReadyEventArgs e)
        {
            if (this.isShutDown)
            {
                return;
            }

            #region Skeleton Data Process Region
            // 1. Process Skeleton Data
            // Grab Skeleton Frame
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null && this.skeletons != null)
                {
                    // Copy Skeleton Datas
                    skeletonFrame.CopySkeletonDataTo(skeletons);
                }
            }
            #endregion

            #region Depth Data Process Region
            // 2. Process Depth Data
            // Grab Depth Frame
            short depthValue;
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    // Copy Depth Pixel Data
                    depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);

                    // Convert Depth to RGB
                    for (int i = 0; i < this.depthPixels.Length; i++)
                    {
                        // Get Depth Value For This Pixel
                        depthValue = depthPixels[i].Depth;

                        unsafe
                        {
                            if (depthValue >= this.maxDepth || depthValue <= this.minDepth)
                            {
                                this.depthImage.ImageDataPtr[i] = (byte)0;
                            }
                            else
                            {
                                this.depthImage.ImageDataPtr[i] = this.depthLookupTable[depthValue];
                            }
                        }
                    }

                    if (this.leftHand.TrackingState == JointTrackingState.Tracked)
                    {
                        DepthImagePoint ldHand = this.sensor.CoordinateMapper.MapSkeletonPointToDepthPoint(this.leftHand.Position, DepthImageFormat.Resolution640x480Fps30);

                        int topleft_x = ldHand.X - Configuration.HAND_REGION_WIDTH / 2;
                        int topleft_y = ldHand.Y - Configuration.HAND_REGION_HEIGHT / 2;

                        if (topleft_x < 0)
                        {
                            topleft_x = 0;
                        }
                        if (topleft_x + Configuration.HAND_REGION_WIDTH >= this.sensor.ColorStream.FrameWidth)
                        {
                            topleft_x = this.sensor.ColorStream.FrameWidth - Configuration.HAND_REGION_WIDTH;
                        }
                        if (topleft_y < 0)
                        {
                            topleft_y = 0;
                        }
                        if (topleft_y + Configuration.HAND_REGION_HEIGHT >= this.sensor.ColorStream.FrameHeight)
                        {
                            topleft_y = this.sensor.ColorStream.FrameHeight - Configuration.HAND_REGION_HEIGHT;
                        }

                        CvRect ldHandRect = new CvRect(topleft_x, topleft_y, Configuration.HAND_REGION_WIDTH, Configuration.HAND_REGION_HEIGHT);
                        //Cv.Rectangle( this.depthImage, ldHandRect, new CvScalar( 0, 0, 255 ), 5 ); // Used for Visualization
                        Cv.SetImageROI(this.depthImage, ldHandRect);
                        Cv.Copy(this.depthImage, this.leftHandDepthImage);
                        Cv.ResetImageROI(this.depthImage);

                        // Filter Hand Depth Image
                        Cv.Smooth(this.leftHandDepthImage, this.leftHandDepthImage, SmoothType.Median);
                    }
                    if (this.rightHand.TrackingState == JointTrackingState.Tracked)
                    {
                        DepthImagePoint rdHand = this.sensor.CoordinateMapper.MapSkeletonPointToDepthPoint(this.rightHand.Position, DepthImageFormat.Resolution640x480Fps30);

                        int topleft_x = rdHand.X - Configuration.HAND_REGION_WIDTH / 2;
                        int topleft_y = rdHand.Y - Configuration.HAND_REGION_HEIGHT / 2;

                        // Clamp the ROI against the depth frame bounds, since the rect is applied to depthImage
                        if (topleft_x < 0)
                        {
                            topleft_x = 0;
                        }
                        if (topleft_x + Configuration.HAND_REGION_WIDTH >= this.sensor.DepthStream.FrameWidth)
                        {
                            topleft_x = this.sensor.DepthStream.FrameWidth - Configuration.HAND_REGION_WIDTH;
                        }
                        if (topleft_y < 0)
                        {
                            topleft_y = 0;
                        }
                        if (topleft_y + Configuration.HAND_REGION_HEIGHT >= this.sensor.DepthStream.FrameHeight)
                        {
                            topleft_y = this.sensor.DepthStream.FrameHeight - Configuration.HAND_REGION_HEIGHT;
                        }

                        CvRect rdHandRect = new CvRect(topleft_x, topleft_y, Configuration.HAND_REGION_WIDTH, Configuration.HAND_REGION_HEIGHT);
                        //Cv.Rectangle( this.depthImage, rdHandRect, new CvScalar( 0, 0, 255 ), 5 ); // Used for Visualization
                        Cv.SetImageROI(this.depthImage, rdHandRect);
                        Cv.Copy(this.depthImage, this.rightHandDepthImage);
                        Cv.ResetImageROI(this.depthImage);

                        // Filter Hand Depth Image
                        Cv.Smooth(this.rightHandDepthImage, this.rightHandDepthImage, SmoothType.Median);
                    }
                }
            }
            #endregion

            #region Color Data Process Region
            // 3. Process Color Data
            // Grab Color Frame
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    // Copy Pixel Data
                    unsafe
                    {
                        colorFrame.CopyPixelDataTo(this.colorImage.ImageData, colorFrame.PixelDataLength);
                    }

                    // Draw Skeleton Position
                    if (skeletons != null)
                    {
                        // Find Appropriate Skeleton
                        Skeleton targetSkeleton = null;
                        for (int i = 0; i < skeletons.Length; i++)
                        {
                            // Skip Invalid State
                            if (skeletons[i] == null)
                            {
                                continue;
                            }

                            // Only Fully Tracked Skeleton
                            if (skeletons[i].TrackingState == SkeletonTrackingState.Tracked)
                            {
                                // Set the target skeleton; if one is already set, keep the nearer one (smaller Z).
                                if (targetSkeleton == null)
                                {
                                    targetSkeleton = skeletons[i];
                                }
                                else if (targetSkeleton.Position.Z > skeletons[i].Position.Z)
                                {
                                    targetSkeleton = skeletons[i];
                                }
                            }
                        }

                        if (targetSkeleton != null)
                        {
                            // Left Hand Position
                            this.leftHand = targetSkeleton.Joints[JointType.HandLeft];
                            // Check Tracked Status
                            if (this.leftHand.TrackingState == JointTrackingState.Tracked)
                            {
                                this.leftHandFound = true;

                                leftHandPoint           = this.sensor.CoordinateMapper.MapSkeletonPointToColorPoint(leftHand.Position, ColorImageFormat.RgbResolution640x480Fps30);
                                this.leftHandPosition.X = leftHandPoint.X;
                                this.leftHandPosition.Y = leftHandPoint.Y;
                                int topleft_x = leftHandPoint.X - Configuration.HAND_REGION_WIDTH / 2;
                                int topleft_y = leftHandPoint.Y - Configuration.HAND_REGION_HEIGHT / 2;

                                // Bound Check
                                if (topleft_x < 0)
                                {
                                    topleft_x = 0;
                                }
                                if (topleft_x + Configuration.HAND_REGION_WIDTH >= this.sensor.ColorStream.FrameWidth)
                                {
                                    topleft_x = this.sensor.ColorStream.FrameWidth - Configuration.HAND_REGION_WIDTH;
                                }
                                if (topleft_y < 0)
                                {
                                    topleft_y = 0;
                                }
                                if (topleft_y + Configuration.HAND_REGION_HEIGHT >= this.sensor.ColorStream.FrameHeight)
                                {
                                    topleft_y = this.sensor.ColorStream.FrameHeight - Configuration.HAND_REGION_HEIGHT;
                                }

                                // Set Hand Position
                                leftHandRect = new CvRect(topleft_x, topleft_y, Configuration.HAND_REGION_WIDTH, Configuration.HAND_REGION_HEIGHT);
                                Cv.Rectangle(this.colorImage, this.leftHandRect, new CvScalar(0, 0, 255), 1);                                     // Used for Visualization
                                Cv.SetImageROI(this.colorImage, this.leftHandRect);
                                // Copy Data
                                Cv.Copy(this.colorImage, this.leftHandImage);
                                // Reset ROI
                                Cv.ResetImageROI(this.colorImage);

                                // Smooth Color Hand Image
                                Cv.Smooth(this.leftHandImage, this.leftHandImage, SmoothType.Median);

                                // Only Hand Region
                                CropNear(this.leftHandDepthImage);

                                // Filter With Depth Image
                                FilterFarObjects(this.leftHandImage, this.leftHandDepthImage);

                                // Detect By Skin Color Model
                                this.skinDetector.FilterSkinColorRegion(this.leftHandImage, this.leftHandSkinImage, 0.25f);
                                // Smooth Color Hand Skin Image
                                //Cv.Smooth( this.leftHandSkinImage, this.leftHandSkinImage, SmoothType.Median );
                                Cv.Smooth(this.leftHandSkinImage, this.leftHandSkinImage, SmoothType.Median, 5);
                                Cv.Erode(this.leftHandSkinImage, this.leftHandSkinImage);
                                Cv.Dilate(this.leftHandSkinImage, this.leftHandSkinImage);


                                // Find Object
                                Cv.Sub(this.leftHandDepthImage, this.leftHandSkinImage, this.leftObjectRemainedImage);
                                Cv.Erode(this.leftObjectRemainedImage, this.leftObjectRemainedImage);
                                Cv.Smooth(this.leftObjectRemainedImage, this.leftObjectRemainedImage, SmoothType.Median);

                                // Filter Objects Only
                                FilterFarObjects(this.leftHandImage, this.leftObjectRemainedImage);

                                Cv.CvtColor(this.leftHandImage, this.leftObjectHSVImage, ColorConversion.BgraToBgr);
                                Cv.CvtColor(this.leftObjectHSVImage, this.leftObjectHSVImage, ColorConversion.BgrToHsv);
                                Cv.Split(this.leftObjectHSVImage, this.leftObjectHImage, this.leftObjectSImage, this.leftObjectVImage, null);
                            }
                            else
                            {
                                this.leftHandFound = false;
                            }

                            // Right Hand Position
                            this.rightHand = targetSkeleton.Joints[JointType.HandRight];
                            if (this.rightHand.TrackingState == JointTrackingState.Tracked)
                            {
                                this.rightHandFound = true;

                                rightHandPoint           = this.sensor.CoordinateMapper.MapSkeletonPointToColorPoint(rightHand.Position, ColorImageFormat.RgbResolution640x480Fps30);
                                this.rightHandPosition.X = rightHandPoint.X;
                                this.rightHandPosition.Y = rightHandPoint.Y;

                                int topleft_x = rightHandPoint.X - Configuration.HAND_REGION_WIDTH / 2;
                                int topleft_y = rightHandPoint.Y - Configuration.HAND_REGION_HEIGHT / 2;

                                // Bound Check
                                if (topleft_x < 0)
                                {
                                    topleft_x = 0;
                                }
                                if (topleft_x + Configuration.HAND_REGION_WIDTH >= this.sensor.ColorStream.FrameWidth)
                                {
                                    topleft_x = this.sensor.ColorStream.FrameWidth - Configuration.HAND_REGION_WIDTH;
                                }
                                if (topleft_y < 0)
                                {
                                    topleft_y = 0;
                                }
                                if (topleft_y + Configuration.HAND_REGION_HEIGHT >= this.sensor.ColorStream.FrameHeight)
                                {
                                    topleft_y = this.sensor.ColorStream.FrameHeight - Configuration.HAND_REGION_HEIGHT;
                                }

                                // Set Hand Position
                                rightHandRect = new CvRect(topleft_x, topleft_y, Configuration.HAND_REGION_WIDTH, Configuration.HAND_REGION_HEIGHT);
                                Cv.Rectangle(this.colorImage, this.rightHandRect, new CvScalar(0, 0, 255), 1);                                     // Used for Visualization
                                Cv.SetImageROI(this.colorImage, this.rightHandRect);
                                // Copy Data
                                Cv.Copy(this.colorImage, this.rightHandImage);
                                // Reset ROI
                                Cv.ResetImageROI(this.colorImage);

                                // Smooth Color Hand Image
                                Cv.Smooth(this.rightHandImage, this.rightHandImage, SmoothType.Median);

                                CropNear(this.rightHandDepthImage);

                                // Filter With Depth Image
                                FilterFarObjects(this.rightHandImage, this.rightHandDepthImage);

                                // Detect By Skin Color Model
                                this.skinDetector.FilterSkinColorRegion(this.rightHandImage, this.rightHandSkinImage, 0.25f);
                                // Smooth Color Hand Skin Image
                                //Cv.Smooth( this.rightHandSkinImage, this.rightHandSkinImage, SmoothType.Median );
                                Cv.Smooth(this.rightHandSkinImage, this.rightHandSkinImage, SmoothType.Median, 5);
                                Cv.Erode(this.rightHandSkinImage, this.rightHandSkinImage);
                                Cv.Dilate(this.rightHandSkinImage, this.rightHandSkinImage);

                                // Find Object
                                Cv.Sub(this.rightHandDepthImage, this.rightHandSkinImage, this.rightObjectRemainedImage);
                                Cv.Erode(this.rightObjectRemainedImage, this.rightObjectRemainedImage);
                                Cv.Smooth(this.rightObjectRemainedImage, this.rightObjectRemainedImage, SmoothType.Median);

                                // Filter Objects Only
                                FilterFarObjects(this.rightHandImage, this.rightObjectRemainedImage);

                                Cv.CvtColor(this.rightHandImage, this.rightObjectHSVImage, ColorConversion.BgraToBgr);
                                Cv.CvtColor(this.rightObjectHSVImage, this.rightObjectHSVImage, ColorConversion.BgrToHsv);
                                Cv.Split(this.rightObjectHSVImage, this.rightObjectHImage, this.rightObjectSImage, this.rightObjectVImage, null);
                            }
                            else
                            {
                                this.rightHandFound = false;
                            }
                        }
                    }
                    #endregion

                    #region Image Display Region
                    // Show Depth Image
                    Cv.ShowImage("Depth Image", this.depthImage);
                    Cv.ShowImage("Left Hand Depth Image", this.leftHandDepthImage);
                    Cv.ShowImage("Right Hand Depth Image", this.rightHandDepthImage);

                    // Show Color Image
                    Cv.ShowImage("Color Image", this.colorImage);
                    Cv.ShowImage("Left Hand Image", this.leftHandImage);
                    Cv.ShowImage("Right Hand Image", this.rightHandImage);
                    Cv.ShowImage("Left Hand Skin Image", this.leftHandSkinImage);
                    Cv.ShowImage("Right Hand Skin Image", this.rightHandSkinImage);

                    // Show Object Only Image
                    Cv.ShowImage("Left Hand Object Image", this.leftObjectRemainedImage);
                    Cv.ShowImage("Right Hand Object Image", this.rightObjectRemainedImage);

                    // Show Object HSV Image
                    Cv.ShowImage("Left Hand H Image", this.leftObjectHImage);
                    Cv.ShowImage("Left Hand S Image", this.leftObjectSImage);
                    Cv.ShowImage("Left Hand V Image", this.leftObjectVImage);
                    Cv.ShowImage("Right Hand H Image", this.rightObjectHImage);
                    Cv.ShowImage("Right Hand S Image", this.rightObjectSImage);
                    Cv.ShowImage("Right Hand V Image", this.rightObjectVImage);


                    #endregion
                }
            }
        }
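        // A minimal sketch of a helper that could replace the four copies of the
        // bound-check above. The frame width/height are passed in explicitly, and
        // Configuration.HAND_REGION_WIDTH/HEIGHT are the same constants used above.
        private static CvRect ClampHandRect(int centerX, int centerY, int frameWidth, int frameHeight)
        {
            // Center the region on the hand, then clamp it so it stays inside the frame.
            int x = centerX - Configuration.HAND_REGION_WIDTH / 2;
            int y = centerY - Configuration.HAND_REGION_HEIGHT / 2;

            x = Math.Max(0, Math.Min(x, frameWidth - Configuration.HAND_REGION_WIDTH));
            y = Math.Max(0, Math.Min(y, frameHeight - Configuration.HAND_REGION_HEIGHT));

            return new CvRect(x, y, Configuration.HAND_REGION_WIDTH, Configuration.HAND_REGION_HEIGHT);
        }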
        void _sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            bool gotColor = false;
            bool gotDepth = false;

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    Debug.Assert(colorFrame.Width == 640 && colorFrame.Height == 480, "This app only uses 640x480.");

                    if (_colorPixels.Length != colorFrame.PixelDataLength)
                    {
                        _colorPixels      = new byte[colorFrame.PixelDataLength];
                        _colorPixels2     = new byte[colorFrame.PixelDataLength];
                        _bitmap           = new WriteableBitmap(640, 480, 96.0, 96.0, PixelFormats.Bgr32, null);
                        _bitmapBits       = new byte[640 * 480 * 4];
                        this.Image.Source = _bitmap;
                    }

                    colorFrame.CopyPixelDataTo(_colorPixels);
                    gotColor = true;
                }
            }

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    Debug.Assert(depthFrame.Width == 640 && depthFrame.Height == 480, "This app only uses 640x480.");

                    if (_depthPixels.Length != depthFrame.PixelDataLength)
                    {
                        _depthPixels          = new short[depthFrame.PixelDataLength];
                        _mappedDepthLocations = new ColorImagePoint[depthFrame.PixelDataLength];
                    }

                    depthFrame.CopyPixelDataTo(_depthPixels);
                    gotDepth = true;
                }
            }

            // Only render when both a color and a depth frame were available this event
            if (!gotColor || !gotDepth)
            {
                return;
            }

            // Put the color image into _bitmapBits
            for (int i = 0; i < _colorPixels.Length; i += 4)
            {
                _bitmapBits[i + 3] = 255;
                _bitmapBits[i + 2] = _colorPixels[i + 2];
                _bitmapBits[i + 1] = _colorPixels[i + 1];
                _bitmapBits[i]     = _colorPixels[i];
            }

            this._sensor.MapDepthFrameToColorFrame(DepthImageFormat.Resolution640x480Fps30, _depthPixels, ColorImageFormat.RgbResolution640x480Fps30, _mappedDepthLocations);


            c = 0;
            while (c < 10)
            {
                c++;
                _bitmap.WritePixels(new Int32Rect(0, 0, _bitmap.PixelWidth, _bitmap.PixelHeight), _bitmapBits, _bitmap.PixelWidth * sizeof(int), 0);
                CreateThumbnail("C:\\Users\\Titiana\\Desktop\\final\\Kinect-Align-Depth-RGB-master\\Camera2\\rgb" + c.ToString() + ".bmp", _bitmap.Clone());
                System.IO.StreamWriter file = new System.IO.StreamWriter("C:\\Users\\Titiana\\Desktop\\final\\Kinect-Align-Depth-RGB-master\\Camera2\\mapping" + c.ToString() + ".txt");

                for (int i = 0; i < _depthPixels.Length; i++)
                {
                    int             depthVal = _depthPixels[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;
                    ColorImagePoint point    = _mappedDepthLocations[i];
                    string          line     = depthVal.ToString() + " " + point.X.ToString() + " " + point.Y.ToString() + "\n";
                    file.Write(line);
                }

                file.Close();
            }
        }
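        // The mapping file written above stores the raw depth after shifting out the
        // player-index bits. A minimal sketch of that unpacking, using the SDK's
        // bitmask constants, is shown here for reference.
        private static int GetDepthMillimeters(short packedDepth)
        {
            // The upper 13 bits hold the depth in millimeters.
            return packedDepth >> DepthImageFrame.PlayerIndexBitmaskWidth;
        }

        private static int GetPlayerIndex(short packedDepth)
        {
            // The lower 3 bits hold the player index (0 = no player).
            return packedDepth & DepthImageFrame.PlayerIndexBitmask;
        }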
Example #26
        void newSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                pixels2 = new byte[colorFrame.PixelDataLength];
                pixels4 = new byte[colordatalength];
                colorFrame.CopyPixelDataTo(pixels2);
                for (int i = colorminy; i < colormaxy; i++)
                {
                    Array.Copy(pixels2, (i * colorFrame.Width + colorminx) * 4, pixels4, (i - colorminy) * colorimagewidth * 4, colorimagewidth * 4);
                }

                // number of bytes per row: width * 4 (B, G, R, unused padding byte)
                int stride = colorimagewidth * 4;

                //create image
                image2.Source =
                    BitmapSource.Create(colorimagewidth, colorimageheight,
                                        96, 96, PixelFormats.Bgr32, null, pixels4, stride);
            }

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                pixels1 = GenerateColoredBytes(depthFrame);
                pixels3 = new byte[depthdatalength];
                for (int i = depthminy; i < depthmaxy; i++)
                {
                    Array.Copy(pixels1, (i * depthFrame.Width + depthminx) * 4, pixels3, (i - depthminy) * depthimagewidth * 4, depthimagewidth * 4);
                }

                // number of bytes per row: width * 4 (B, G, R, unused padding byte)
                int stride = depthimagewidth * 4;

                //create image
                image1.Source =
                    BitmapSource.Create(depthimagewidth, depthimageheight,
                                        96, 96, PixelFormats.Bgr32, null, pixels3, stride);
            }

            if (counter == 0)
            {
                try
                {
                    sensor.ElevationAngle = (int)slider1.Value;
                }
                catch (System.InvalidOperationException)
                {
                    // ElevationAngle throws if the sensor is busy or the angle is changed too frequently; skip and retry on a later pass.
                }

                pixels5 = imageMerge(pixels3, depthimagewidth, depthimageheight, pixels4, colorimagewidth, colorimageheight);
                int stride = depthimagewidth * 4;
                image3.Source = BitmapSource.Create(depthimagewidth, depthimageheight, 96, 96, PixelFormats.Bgr32, null, pixels5, stride);

                image4.Source = image3.Source;

                SaveImageToJPG(image4, OutputImage);

                /*
                 * using (StreamWriter newTask = new StreamWriter(output, false))
                 * {
                 *  newTask.WriteLine(calculateboardstring());
                 * }*/

                off1 = (int)slider2.Value;
                off2 = (int)slider3.Value;

                colorminx = 320 - off1; //-240; //boundaries of board on color image
                colormaxx = 320 + off1; //+240;
                colorminy = 240 - off2; //-180;
                colormaxy = 240 + off2; //+180;

                colorimagewidth  = colormaxx - colorminx;
                colorimageheight = colormaxy - colorminy;
                colordatalength  = colorimagewidth * colorimageheight * 4; //in bytes

                depthminx = 320 - off1 - offw + offx;                      //240; //boundaries of board on depth image
                depthmaxx = 320 + off1 + offw + offx;                      //+240;
                depthminy = 240 - off2 - offh + offy;                      //-180;
                depthmaxy = 240 + off2 + offh + offy;                      //+180;

                depthimagewidth  = depthmaxx - depthminx;
                depthimageheight = depthmaxy - depthminy;
                depthdatalength  = depthimagewidth * depthimageheight * 4; //in bytes

                border4.Height = depthimageheight;
                border4.Width  = depthimagewidth;
                image4.Height  = depthimageheight;
                image4.Width   = depthimagewidth;
                counter++;
            }
            else if (counter > delay)
            {
                counter = 0;
            }
            else
            {
                counter++;
            }
        }
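        // The two Array.Copy loops above crop a rectangular region out of a Bgr32
        // pixel buffer one row at a time. A minimal sketch of that logic as a
        // reusable helper (assuming 4 bytes per pixel) follows.
        private static byte[] CropBgr32(byte[] source, int sourceWidth, int minX, int minY, int cropWidth, int cropHeight)
        {
            byte[] cropped = new byte[cropWidth * cropHeight * 4];
            for (int row = 0; row < cropHeight; row++)
            {
                // Copy one cropped row; each pixel is 4 bytes wide.
                Array.Copy(source, ((minY + row) * sourceWidth + minX) * 4,
                           cropped, row * cropWidth * 4,
                           cropWidth * 4);
            }
            return cropped;
        }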
Example #27
        private void KinectSensorOnAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
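            // Process every other event: the flag flips on each call, so half of the
            // incoming frames are skipped to reduce processing load.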
            if (frameProccessed[1] == false)
            {
                frameProccessed[1] = true;
            }
            else
            {
                frameProccessed[1] = false;
                return;
            }
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = e.OpenColorImageFrame();
                depthImageFrame = e.OpenDepthImageFrame();
                skeletonFrame   = e.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }

                // Note: the frames were already disposed above; the work below relies on the
                // copied buffers (colorImage, depthImage, skeletonData).
                using (depthImageFrame)
                {
                    if (depthImageFrame != null && skeletonData != null)
                    {
                        foreach (Skeleton sd in skeletonData)
                        {
                            if (sd.TrackingState == SkeletonTrackingState.Tracked || sd.TrackingState == SkeletonTrackingState.PositionOnly)
                            {
                                Joint joint = sd.Joints[JointType.Head];

                                DepthImagePoint  depthPoint;
                                CoordinateMapper coordinateMapper = new CoordinateMapper(frontSensor);
                                depthPoint = coordinateMapper.MapSkeletonPointToDepthPoint(joint.Position, DepthImageFormat.Resolution320x240Fps30);

                                point = new System.Windows.Point((int)(frontSensor.ColorStream.FrameWidth * depthPoint.X
                                                                       / depthImageFrame.Width),
                                                                 (int)(frontSensor.ColorStream.FrameHeight * depthPoint.Y
                                                                       / depthImageFrame.Height));

                                /* textBlock1.Text = string.Format("X:{0:0.00} Y:{1:0.00} Z:{2:0.00}",
                                 *                              point.X,
                                 *                              point.Y,
                                 *                              joint.Position.Z); */

                                Canvas.SetLeft(headEllipse, point.X - headEllipse.Width / 2);
                                Canvas.SetTop(headEllipse, point.Y - headEllipse.Height / 2);

                                if (this.faceTracker == null)
                                {
                                    try
                                    {
                                        this.faceTracker = new FaceTracker(frontSensor);
                                    }
                                    catch (InvalidOperationException)
                                    {
                                        // During some shutdown scenarios the FaceTracker
                                        // cannot be instantiated. Catch that exception
                                        // and don't track a face.
                                        this.faceTracker = null;
                                    }
                                }
                                if (this.faceTracker != null)
                                {
                                    FaceTrackFrame frame = this.faceTracker.Track(
                                        colorImageFormat, colorImage, depthImageFormat, depthImage, sd);

                                    if (frame.TrackSuccessful)
                                    {
                                        faceTriangles   = frame.GetTriangles();
                                        this.facePoints = frame.GetProjected3DShape();

                                        var faceModelPts = new List <Point>();
                                        var faceModel    = new List <FaceModelTriangle>();


                                        for (int i = 0; i < this.facePoints.Count; i++)
                                        {
                                            faceModelPts.Add(new Point(this.facePoints[i].X + 0.5f, this.facePoints[i].Y + 0.5f));
                                        }

                                        foreach (var t in faceTriangles)
                                        {
                                            var triangle = new FaceModelTriangle();
                                            triangle.P1 = faceModelPts[t.First];
                                            //triangle.P2 = faceModelPts[t.Second];
                                            //triangle.P3 = faceModelPts[t.Third];
                                            faceModel.Add(triangle);
                                        }

                                        Canvas.SetLeft(noseEllipse, faceModel[108].P1.X - noseEllipse.Width / 2);
                                        Canvas.SetTop(noseEllipse, faceModel[108].P1.Y - noseEllipse.Height / 2);
                                        nosePoint = new Point(faceModel[108].P1.X, faceModel[108].P1.Y);
                                    }
                                }
                            }
                        }
                    }
                }

                getAttentionAngle(nosePoint);
            }
        }
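        // A minimal sketch of the proportional scaling used above: a point in depth
        // coordinates is stretched to the color frame's resolution. Stream dimensions
        // are passed in explicitly rather than read from a frame object.
        private static System.Windows.Point ScaleDepthPointToColor(
            DepthImagePoint depthPoint, int depthWidth, int depthHeight, int colorWidth, int colorHeight)
        {
            return new System.Windows.Point(
                (int)((long)colorWidth * depthPoint.X / depthWidth),
                (int)((long)colorHeight * depthPoint.Y / depthHeight));
        }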
Example #28
 /// <summary>
 /// Gets the pixel data of a color frame as a byte array.
 /// </summary>
 /// <param name="colorFrame">The color frame to copy pixel data from.</param>
 /// <returns>A byte array containing the frame's pixel data.</returns>
 public static byte[] ToPixelData(this ColorImageFrame colorFrame)
 {
     byte[] pixels = new byte[colorFrame.PixelDataLength];
     colorFrame.CopyPixelDataTo(pixels);
     return pixels;
 }
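 // Usage inside a handler would be, e.g., "byte[] pixels = colorFrame.ToPixelData();".
 // A hypothetical companion extension for depth frames, mirroring the same pattern:
 public static short[] ToDepthData(this DepthImageFrame depthFrame)
 {
     short[] data = new short[depthFrame.PixelDataLength];
     depthFrame.CopyPixelDataTo(data);
     return data;
 }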
Example #29
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.ResetFaceTracking();
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information
                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                // Update the list of trackers and the trackers with the current frame information
                foreach (Skeleton skeleton in this.skeletonData)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked ||
                        skeleton.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        // We want to keep a record of any skeleton, tracked or untracked.
                        if (!this.trackedSkeletons.ContainsKey(skeleton.TrackingId))
                        {
                            this.trackedSkeletons.Add(skeleton.TrackingId, new SkeletonFaceTracker());
                        }

                        // Give each tracker the updated frame.
                        SkeletonFaceTracker skeletonFaceTracker;
                        if (this.trackedSkeletons.TryGetValue(skeleton.TrackingId, out skeletonFaceTracker))
                        {
                            skeletonFaceTracker.OnFrameReady(this.Kinect, colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);
                            skeletonFaceTracker.LastTrackedFrame = skeletonFrame.FrameNumber;
                        }
                    }
                }

                this.RemoveOldTrackers(skeletonFrame.FrameNumber);

                this.InvalidateVisual();
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
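        // RemoveOldTrackers is called above but its body is not shown here. A minimal
        // sketch of what such pruning might look like, assuming a MaxMissedFrames
        // threshold and the LastTrackedFrame field set in the handler above:
        private void RemoveOldTrackers(int currentFrameNumber)
        {
            var trackersToRemove = new List<int>();

            foreach (var tracker in this.trackedSkeletons)
            {
                // Drop trackers whose skeleton has not been seen for too many frames.
                if (currentFrameNumber - tracker.Value.LastTrackedFrame > MaxMissedFrames)
                {
                    trackersToRemove.Add(tracker.Key);
                }
            }

            foreach (int trackingId in trackersToRemove)
            {
                this.trackedSkeletons.Remove(trackingId);
            }
        }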
Example #30
        private void AllFramesReadyEventHandler(object sender, AllFramesReadyEventArgs args)
        {
            using (ColorImageFrame ciFrame = args.OpenColorImageFrame())
            {
                if (null != ciFrame)
                {
                    ciFrame.CopyPixelDataTo(this.ColorPixels);

                    ColorBitmap.WritePixels(new Int32Rect(0, 0, ColorWidth, ColorHeight),
                                            ColorPixels, ColorWidth * sizeof(int), 0);
                }
            }

            using (DepthImageFrame diFrame = args.OpenDepthImageFrame())
            {
                if (null != diFrame)
                {
                    diFrame.CopyPixelDataTo(this.DepthDatas);
                }
                else
                {
                    return;
                }
            }

            // Clear
            //Array.Clear(PlayerPixels, 0, PlayerPixels.Length);
            //System.Threading.Tasks.Parallel.For(0, PlayerPixels.Length, index =>
            //    {
            //        PlayerPixels[index] = 200;
            //    });

            Array.Clear(CIP, 0, CIP.Length);

            gSensor.MapDepthFrameToColorFrame(DIF, DepthDatas, CIF, CIP);

            byte[] pixels = new byte[gSensor.DepthStream.FramePixelDataLength * sizeof(int)];

            // Fill the Player Image
            for (int hIndex = 0; hIndex < DepthHeight; ++hIndex)
            {
                for (int wIndex = 0; wIndex < DepthWidth; ++wIndex)
                {
                    int index = wIndex + hIndex * DepthWidth;
                    //int player = DepthDatas[index] & DepthImageFrame.PlayerIndexBitmask;

                    if (0 < (DepthDatas[index] & DepthImageFrame.PlayerIndexBitmask)) // Just for Player
                    {
                        ColorImagePoint cip = CIP[index];

                        // scale color coordinates to depth resolution
                        int colorInDepthX = (int)(cip.X / this.Divisor);
                        int colorInDepthY = (int)(cip.Y / this.Divisor);

                        if (colorInDepthX > 0 && colorInDepthX < this.DepthWidth &&
                            colorInDepthY >= 0 && colorInDepthY < this.DepthHeight)
                        {
                            // calculate index into the green screen pixel array
                            int playerIndex = (colorInDepthX + (colorInDepthY * this.DepthWidth)) << 2;
                            int colorIndex  = (cip.X + cip.Y * ColorWidth) << 2;

                            pixels[playerIndex]     = ColorPixels[colorIndex]; //BitConverter.ToInt32(ColorPixels, colorIndex);
                            pixels[playerIndex + 1] = ColorPixels[colorIndex + 1];
                            pixels[playerIndex + 2] = ColorPixels[colorIndex + 2];
                            pixels[playerIndex + 3] = ColorPixels[colorIndex + 3];

                            --playerIndex;
                            --colorIndex;

                            pixels[playerIndex]     = ColorPixels[colorIndex]; //BitConverter.ToInt32(ColorPixels, colorIndex);
                            pixels[playerIndex + 1] = ColorPixels[colorIndex + 1];
                            pixels[playerIndex + 2] = ColorPixels[colorIndex + 2];
                            pixels[playerIndex + 3] = ColorPixels[colorIndex + 3];
                        }

                        HadPlayer = true;
                    }
                    //else
                    //{
                    //    HadPlayer = false;
                    //}
                }
            }

            lock (gLock)
            {
                // Enqueue
                //PixelsQueue.Enqueue(pixels);
                //Average.ResetQueue(PixelsQueue, 3);

                PixelsLinkedList.AddLast(pixels);
                Average.ResetLinkedList(PixelsLinkedList, 3);
            }

            // Smoothen
            if (null == smooth && HadPlayer)
            {
                Color bg = new Color();
                bg.B = bg.G = bg.R = 0;

                // Gaussian
                //smooth = new GaussianFilter(DepthWidth, DepthHeight, PixelFormats.Bgr32, bg);

                // Bilateral
                smooth = new BilateralFilter(DepthWidth, DepthHeight, PixelFormats.Bgr32);

                // Median
                smooth2 = new GenericMedian(DepthWidth, DepthHeight, PixelFormats.Bgr32, bg, 3);

                median = new AForge.Imaging.Filters.Median(5);

                if (null == globalBWorker)
                {
                    globalBWorker         = new BackgroundWorker();
                    globalBWorker.DoWork += DoWorking;

                    globalBWorker.RunWorkerAsync();
                }
            }

            ////PlayerBitmap.WritePixels(new Int32Rect(0, 0, DepthWidth, DepthHeight),
            ////    PlayerPixels, DepthWidth * ((PlayerBitmap.Format.BitsPerPixel + 7) / 8), 0);
        }
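        // Illustration of the index arithmetic used in the player loop above: each
        // pixel occupies four bytes (BGRA), so the byte offset of pixel (x, y) is
        // (x + y * width) << 2.
        private static int ToByteIndex(int x, int y, int width)
        {
            return (x + y * width) << 2;   // e.g. (100, 50) in a 640-wide buffer -> 128400
        }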