Example #1
        void sensor_AllFramesReady(object sender, Microsoft.Kinect.AllFramesReadyEventArgs e)
        {
            // Dispatch the current command to the bot
            if (this.command == "Stop")
            {
                Bot.stop();
            }
            else if (this.command == "Forward")
            {
                Bot.traverse();
            }
            else if (this.command == "Right")
            {
                Bot.turnRight();
            }
            else if (this.command == "Left")
            {
                Bot.turnLeft();   // assumption: Bot provides turnLeft(); the "Left" command should not turn right
            }
            // Run the heavier vision processing only on every fifth frame
            xf++;
            if (xf % 5 == 0)
            {
                xf = 0;
                if (this.command != null)
                {
                    using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                    {
                        using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                        {
                            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
                            {
                                humanPosition = frameToHuman(skeletonFrame);

                                if (colorFrame != null)
                                {
                                    // Copy the pixel data from the image to a temporary array
                                    colorFrame.CopyPixelDataTo(this.colorPixels);

                                    // Write the pixel data into our bitmap
                                    this.colorBitmap.WritePixels(
                                        new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                                        this.colorPixels,
                                        this.colorBitmap.PixelWidth * sizeof(int),
                                        0);

                                    // Error here due to OpenCV_core290.dll
                                    //int[] objPos = new int[2];
                                    //objPos = tmp.matchColor(ImageProc.colorFrameToImage(colorFrame));
                                    //if (objPos != null)
                                    //{
                                    //    short blobDepth = getDepthAtPoint(objPos, depthFrame);
                                    //    this.lblObject.Content = objPos[0] + ", " + objPos[1] + ", " + blobDepth;
                                    //}
                                    //else
                                    //{
                                    //    this.lblObject.Content = "Null";
                                    //}
                                    System.Drawing.Bitmap bmp    = ImageProc.colorFrameToAforge(colorFrame);
                                    HSLFiltering          filter = new HSLFiltering();
                                    // set color ranges to keep
                                    // objec[0] == -1 means the target object has not been located yet
                                    if (objec[0] == -1)
                                    {
                                        if (command == "Fetching Bottle")
                                        {
                                            filter.Hue        = bottleH;
                                            filter.Saturation = bottleS;
                                            filter.Luminance  = bottleL;
                                        }
                                        else if (command == "Fetching Box")
                                        {
                                            filter.Hue        = boxH;
                                            filter.Saturation = boxS;
                                            filter.Luminance  = boxL;
                                        }

                                        //// apply the filter
                                        filter.ApplyInPlace(bmp);

                                        // Keep only the biggest blob that survives the colour filter
                                        BlobCounter        blobCounter = new BlobCounter(bmp);
                                        ExtractBiggestBlob fil         = new ExtractBiggestBlob();

                                        int[] pp = new int[2];
                                        int   h  = 0;
                                        if (blobCounter.ObjectsCount > 0)
                                        {
                                            // Apply the extractor once and reuse its result for position and size
                                            System.Drawing.Bitmap biggest = fil.Apply(bmp);
                                            pp[0] = fil.BlobPosition.X;
                                            pp[1] = fil.BlobPosition.Y;
                                            h     = biggest.Height;
                                        }

                                        short blobDepth = getDepthAtPoint(pp, depthFrame);
                                        this.lblObject.Content = pp[0] + ", " + pp[1] + ", " + blobDepth;
                                        this.objec[0]          = pp[0];
                                        this.objec[1]          = pp[1];
                                        this.objec[2]          = blobDepth;
                                    }
                                    else
                                    {
                                        // Otherwise track the bot itself using its HSL colour ranges
                                        filter.Hue        = botH;
                                        filter.Saturation = botS;
                                        filter.Luminance  = botL;
                                        filter.ApplyInPlace(bmp);

                                        BlobCounter        blobCounter = new BlobCounter(bmp);
                                        ExtractBiggestBlob fil         = new ExtractBiggestBlob();

                                        int[] pp = new int[2];
                                        int   h  = 0;
                                        if (blobCounter.ObjectsCount > 0)
                                        {
                                            // Apply the extractor once and reuse its result for position and size
                                            System.Drawing.Bitmap biggest = fil.Apply(bmp);
                                            pp[0] = fil.BlobPosition.X;
                                            pp[1] = fil.BlobPosition.Y;
                                            h     = biggest.Height;
                                        }

                                        short blobDepth = getDepthAtPoint(pp, depthFrame);
                                        this.lblBot.Content = pp[0] + ", " + pp[1] + ", " + blobDepth;
                                        this.bot[0]         = pp[0];
                                        this.bot[1]         = pp[1];
                                        this.bot[2]         = blobDepth;
                                    }

                                    //Assign Manual Position to bot and object
                                }
                                if (humanPosition != null)
                                {
                                    this.lblHuman.Content = humanPosition[0] + ", " + humanPosition[1] + ", " + humanPosition[2];
                                }
                                else
                                {
                                    this.lblHuman.Content = "No Human detected";
                                }

                                // path == 0: steer toward the tracked human first, then toward the located object
                                // (moveDoraemon appears to return 0 once the bot has reached its target)
                                if (this.path == 0)
                                {
                                    if (humanPosition != null)
                                    {
                                        if (Bot.moveDoraemon(this.bot[0], this.humanPosition[0], this.bot[2], this.humanPosition[2]) == 0)
                                        {
                                            this.path = 1;
                                        }
                                    }
                                }
                                else
                                {
                                    if (Bot.moveDoraemon(this.bot[0], this.objec[0], this.bot[2], this.objec[2]) == 0)
                                    {
                                        Bot.stop();
                                    }
                                }
                            }
                        }
                    }
                    this.lbl.Content = command;
                }
            }
        }
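
Both colour-tracking branches in this handler run the same HSL-filter / biggest-blob pipeline and differ only in the ranges they feed it, so the shared step could be pulled into a helper. The sketch below is an illustration only, written against the AForge.NET types already used above; the names BlobLocator, TryLocateBlob and BlobHit are invented for this example and are not part of the original project.

using System.Drawing;
using AForge;
using AForge.Imaging;
using AForge.Imaging.Filters;

static class BlobLocator
{
    // Result of a single colour-based blob search.
    public struct BlobHit
    {
        public int X;
        public int Y;
        public int Height;
    }

    // Applies the given HSL ranges to the bitmap and returns the position and height of the
    // biggest blob that survives the filter, or null when nothing passes it.
    public static BlobHit? TryLocateBlob(Bitmap bmp, IntRange hue, Range saturation, Range luminance)
    {
        HSLFiltering filter = new HSLFiltering();
        filter.Hue        = hue;
        filter.Saturation = saturation;
        filter.Luminance  = luminance;
        filter.ApplyInPlace(bmp);

        BlobCounter blobCounter = new BlobCounter(bmp);
        if (blobCounter.ObjectsCount == 0)
        {
            return null;
        }

        ExtractBiggestBlob biggest = new ExtractBiggestBlob();
        Bitmap blobImage = biggest.Apply(bmp);
        return new BlobHit { X = biggest.BlobPosition.X, Y = biggest.BlobPosition.Y, Height = blobImage.Height };
    }
}

With a helper like this, each branch would reduce to a single call such as BlobLocator.TryLocateBlob(bmp, bottleH, bottleS, bottleL), assuming the bottle/box/bot fields already hold AForge IntRange/Range values.
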
Example #2
        private void OnAllFramesReady(object sender, Microsoft.Kinect.AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information
                if (this.SkeletonData == null || this.SkeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.SkeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.SkeletonData);
                Skeleton activeSkeleton = (from skel in this.SkeletonData
                                           where skel.TrackingState == SkeletonTrackingState.Tracked
                                           select skel).FirstOrDefault();


                //Idea: Separate Eye-Parts of Color Image
                //Use learning Algorithm for right and left eye
                //Detect blink on separated parts of color Image

                //colorImage is one dimensional array with 640 x 480 x 4 (RGBA) values
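                //Sketch (not original code): with 4 bytes per pixel at 640x480, the pixel at (x, y)
                //starts at index (y * 640 + x) * 4, so an eye region could be cropped roughly like:
                //    int idx = (y * 640 + x) * 4;
                //    // colorImage[idx] .. colorImage[idx + 3] are that pixel's four channel bytes;
                //    // loop y over [minY, maxY] and x over [minX, maxX] to copy the region.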


                if (activeSkeleton != null)
                {
                    FaceTrackFrame currentFaceFrame = faceTracker.Track(ColorImageFormat.RgbResolution640x480Fps30, colorImage, depthImageFormat, depthImage, activeSkeleton);
                    float          browRaiserValue  = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowRaiser];
                    float          browLowererValue = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowLower];
                    tbBrowLowerer.Text = browLowererValue.ToString();
                    tbBrowRaiser.Text  = browRaiserValue.ToString();
                    //Get relevant Points for blink detection
                    //Left eye
                    int    minX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].X);
                    int    minY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].Y);
                    int    maxX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].X);
                    int    maxY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].Y);
                    Bitmap leftEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, false);
                    pbLeftEye.Image = leftEye;

                    //Right eye
                    minX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].X);
                    minY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].Y);
                    maxX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].X);
                    maxY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].Y);

                    Bitmap rightEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, true);
                    pbRightEye.Image = rightEye;

                        //Apply an edge filter to both eyes.
                    double dxRight;
                    double dyRight;
                    double dxLeft;
                    double dyLeft;
                    if (rightEye != null && leftEye != null)
                    {
                        Bitmap edgePicRight = Convolution(ConvertGrey(rightEye), true, out dxRight, out dyRight);
                        Bitmap edgePicLeft  = Convolution(ConvertGrey(leftEye), false, out dxLeft, out dyLeft);



                        //If Face is rotated, move Mouse
                        if (headRotationHistory.Count > filterLength && currentFaceFrame.TrackSuccessful)
                        {
                            int x = 0;
                            int y = 0;

                            //Method 1: no smoothing
                            //ScaleXY(currentFaceFrame.Rotation, out x, out y);
                            //MouseControl.Move(x, y);

                            ////Method 2: smoothing over the last x frames:
                            //int i = 0;
                            //Vector3DF rotationMedium = new Vector3DF();
                            //while (i < 10 && headRotationHistory.Count - 1 > i)
                            //{
                            //    i++;
                            //    rotationMedium.X += headRotationHistory[headRotationHistory.Count - 1 - i].X;
                            //    rotationMedium.Y += headRotationHistory[headRotationHistory.Count - 1 - i].Y;
                            //}
                            //rotationMedium.X = rotationMedium.X / i;
                            //rotationMedium.Y = rotationMedium.Y / i;
                            //ScaleXY(rotationMedium, out x, out y);
                            //MouseControl.Move(x, y);

                            //Method 3: Gaussian filter: weight the most recent frames more heavily.



                            Vector3DF rotationMedium = new Vector3DF();
                            rotationMedium.X = currentFaceFrame.Rotation.X * gaussFilter[0];
                            rotationMedium.Y = currentFaceFrame.Rotation.Y * gaussFilter[0];
                            int i = 0;
                            while (i < filterLength - 1)
                            {
                                i++;
                                rotationMedium.X += (headRotationHistory[headRotationHistory.Count - 1 - i].X * gaussFilter[i]);
                                rotationMedium.Y += (headRotationHistory[headRotationHistory.Count - 1 - i].Y * gaussFilter[i]);
                            }
                            rotationMedium.X = (float)(rotationMedium.X / gaussFactor);
                            rotationMedium.Y = (float)(rotationMedium.Y / gaussFactor);
                            ScaleXY(rotationMedium, out x, out y);

                            MouseControl.Move(x, y);
                            //Method 4: quadratic smoothing
                            //double deltaX = ((-currentFaceFrame.Rotation.Y) - (-headRotationHistory.Last().Y));
                            //double deltaY = ((-currentFaceFrame.Rotation.X) - (-headRotationHistory.Last().X));
                            //if (deltaX < 0)
                            //    deltaX = -Math.Pow(deltaX, 2) * 4;
                            //else
                            //    deltaX = Math.Pow(deltaX, 2) * 4;
                            //if (deltaY < 0)
                            //    deltaY = -Math.Pow(deltaY, 2) * 5;
                            //else
                            //    deltaY = Math.Pow(deltaY, 2) * 5;
                            //MouseControl.DeltaMove((int)Math.Round(deltaX, 0), (int)Math.Round(deltaY));
                        }

                        headRotationHistory.Add(currentFaceFrame.Rotation);
                        if (headRotationHistory.Count >= 100)
                        {
                            headRotationHistory.RemoveAt(0);
                        }
                    }
                }
            }
            catch (Exception)
            {
                // Ignore tracking failures here; the next AllFramesReady event simply tries again.
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
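
Method 3 in this handler relies on precomputed fields gaussFilter and gaussFactor whose initialisation is not shown in the excerpt. The sketch below shows one plausible way such weights could be built, assuming gaussFilter holds one weight per smoothed frame (index 0 = current frame) and gaussFactor is their sum; the class name GaussSmoothing and the sigma value are illustrative, not taken from the original code.

using System;
using System.Linq;

static class GaussSmoothing
{
    // Gaussian weights w[i] = exp(-i^2 / (2 * sigma^2)); w[0] (the current frame) is the largest,
    // so newer rotation samples influence the smoothed value more than older ones.
    public static float[] BuildGaussWeights(int filterLength, double sigma)
    {
        float[] weights = new float[filterLength];
        for (int i = 0; i < filterLength; i++)
        {
            weights[i] = (float)Math.Exp(-(i * (double)i) / (2.0 * sigma * sigma));
        }
        return weights;
    }
}

// Possible one-time setup, reusing the field names from the handler above:
//     gaussFilter = GaussSmoothing.BuildGaussWeights(filterLength, sigma: 2.0);
//     gaussFactor = gaussFilter.Sum();

Dividing the weighted sum by gaussFactor, as the handler does, then turns the accumulated value into a proper weighted average of the recent head rotations.
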