Example #1
        void kinectSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Retrieve each frame and copy out its data
            using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }
                colorImageFrame.CopyPixelDataTo(colorPixelData);
                //int strade = colorImageFrame.Width * 4;
                //image1.Source = BitmapSource.Create(colorImageFrame.Width, colorImageFrame.Height, 96, 96,
                //                                    PixelFormats.Bgr32, null, colorPixelData, strade);
            }

            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            {
                if (depthImageFrame == null)
                {
                    return;
                }
                depthImageFrame.CopyPixelDataTo(depthPixelData);
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }
                skeletonFrame.CopySkeletonDataTo(skeletonData);
            }

            // Retrieve the first tracked skeleton if any. Otherwise, do nothing.
            var skeleton = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);

            if (skeleton == null && !sessionClose)
            {
                serialP.WriteLine("s");
                serialP.WriteLine("c");
                serialP.WriteLine("p");
                serialP.WriteLine("g");
                if (isActive)
                {
                    isActive = false;
                }

                slejenie           = false;
                activatorRightHand = 0;
                activatorLeftHand  = false;
                firstMeet          = false;

                sessionClose = true;
                return;
            }
            else if (skeleton != null && !firstMeet)
            {
                serialP.WriteLine("i");
                playsound(comms[0]);
                firstMeet    = true;
                sessionClose = false;
            }
            if (sessionClose)
            {
                return;
            }
            // Have the faceTracker process the color, depth, and skeleton data.
            FaceTrackFrame faceFrame = faceTracker.Track(kinectSensor.ColorStream.Format, colorPixelData,
                                                         kinectSensor.DepthStream.Format, depthPixelData,
                                                         skeleton);

            EnumIndexableCollection<FeaturePoint, PointF> facePoints = faceFrame.GetProjected3DShape();


            // Joints of the head, hands, and shoulder center - used to detect the HELLO gesture, etc.
            Joint shoulderCenter = skeleton.Joints[JointType.ShoulderCenter];
            Joint head           = skeleton.Joints[JointType.Head];
            Joint rightHand      = skeleton.Joints[JointType.HandRight];
            Joint leftHand       = skeleton.Joints[JointType.HandLeft];

            // initialize sound for hello
            //SoundPlayer a = new SoundPlayer("C:\\sal.wav");


            // open stream for uart reading
            //serialP.Open();

            // Lip-corner points - used to detect a smile
            double x1 = facePoints[88].X;
            double y1 = facePoints[88].Y;

            System.Windows.Point leftLip = new System.Windows.Point(x1, y1);
            double x2 = facePoints[89].X;
            double y2 = facePoints[89].Y;

            System.Windows.Point rightLip = new System.Windows.Point(x2, y2);
            Vector subtr = System.Windows.Point.Subtract(leftLip, rightLip);

            // Distance between the Kinect and the person, in centimeters
            distance = skeleton.Position.Z * 100;

            // Distance between the two lip corners
            double length = Math.Sqrt(subtr.X * subtr.X + subtr.Y * subtr.Y);

            int check = 100;

            double angle1 = 0d;
            double angle2 = 0d;
            double angle  = skeleton.Position.X * 100;

            #region "Smile deterine"
            if (distance >= 95 && distance < 110)
            {
                check = 22;
            }
            else if (distance >= 110 && distance < 120)
            {
                check = 19;
            }
            else if (distance >= 120 && distance < 130)
            {
                check = 18;
            }
            else if (distance >= 130 && distance < 140)
            {
                check = 17;
            }
            else if (distance >= 140 && distance < 150)
            {
                check = 16;
            }
            else if (distance >= 150 && distance < 160)
            {
                check = 14;
            }
            else if (distance >= 160 && distance < 170)
            {
                check = 13;
            }
            else if (distance >= 170 && distance < 180)
            {
                check = 12;
            }
            else if (distance >= 180 && distance < 190)
            {
                check = 11;
            }

            #endregion

            #region "Angle"
            if (distance >= 90 && distance < 110)
            {
                angle1 = -15;
                angle2 = 15;
            }
            else if (distance >= 110 && distance < 150)
            {
                angle1 = -20;
                angle2 = 20;
            }
            else if (distance >= 150 && distance < 170)
            {
                angle1 = -30;
                angle2 = 30;
            }
            else if (distance >= 170 && distance < 200)
            {
                angle1 = -35;
                angle2 = 35;
            }
            else if (distance >= 200)
            {
                angle1 = -40;
                angle2 = 40;
            }
            #endregion

            double condition1 = Math.Abs(leftHand.Position.Z * 100 - shoulderCenter.Position.Z * 100);
            double condition2 = Math.Abs(rightHand.Position.Z * 100 - shoulderCenter.Position.Z * 100);

            // If both hands are extended far enough in front of the shoulder center (and not crossed), activate object-following mode
            if (condition1 > 45 &&
                condition2 > 45 &&
                leftHand.Position.X < rightHand.Position.X)
            {
                if (!slejenie)
                {
                    isActive       = true;
                    FIXED_DISTANCE = distance;
                    slejenie       = true;
                }
            }

            // Crossing the hands (left hand to the right of the right hand) stops object-following mode
            if (leftHand.Position.X > rightHand.Position.X)
            {
                isActive = false;
            }

            // Object-following mode
            if (isActive)
            {
                int pinkIs   = (int)typeCondition.THIRD;
                int purpleIs = (int)typeCondition.FORTH;
                int redIs    = (int)typeCondition.FIVTH;
                int yellowIs = (int)typeCondition.SIXTH;

                if (distance > FIXED_DISTANCE + 10.0d)
                {
                    if (angle < angle1)
                    {
                        ellipseSmile.Fill = Brushes.Pink;
                        if (currentAction != pinkIs) // turn right
                        {
                            currentAction = pinkIs;
                            serialP.WriteLine("r");
                        }
                    }
                    else if (angle > angle2) // turn left
                    {
                        ellipseSmile.Fill = Brushes.Purple;
                        if (currentAction != purpleIs)
                        {
                            currentAction = purpleIs;
                            serialP.WriteLine("l");
                        }
                    }
                    else
                    {
                        ellipseSmile.Fill = Brushes.Red;
                        if (currentAction != redIs) // move forward
                        {
                            currentAction = redIs;
                            serialP.WriteLine("f");
                        }
                    }
                }
                else if (distance > 90)
                {
                    if (angle < angle1)
                    {
                        ellipseSmile.Fill = Brushes.Pink;
                        if (currentAction != pinkIs) // turn right
                        {
                            currentAction = pinkIs;
                            serialP.WriteLine("r");
                        }
                    }
                    else if (angle > angle2)
                    {
                        ellipseSmile.Fill = Brushes.Purple;
                        if (currentAction != purpleIs) // turn left
                        {
                            currentAction = purpleIs;
                            serialP.WriteLine("l");
                        }
                    }
                    else
                    {
                        ellipseSmile.Fill = Brushes.Yellow;
                        if (currentAction != yellowIs) // stop, i.e. do nothing
                        {
                            currentAction = yellowIs;
                            serialP.WriteLine("s");
                        }
                    }
                }
                else
                {
                    ellipseSmile.Fill = Brushes.Yellow;
                    if (currentAction != yellowIs) // stop, i.e. do nothing
                    {
                        currentAction = yellowIs;
                        serialP.WriteLine("s");
                    }
                }
            }


            // If object-following mode is disabled
            else if (!isActive)
            {
                int blueIs  = (int)typeCondition.FIRST;
                int blackIs = (int)typeCondition.SECOND;
                int onkol   = (int)typeCondition.SEVENTH;

                if (leftHand.Position.Y > head.Position.Y && rightHand.Position.Y < shoulderCenter.Position.Y)
                {
                    ellipseSmile.Fill = Brushes.Blue;
                    if (currentAction != blueIs && !activatorLeftHand) // wave hello with the left hand
                    {
                        currentAction = blueIs;
                        serialP.WriteLine("q");
                        activatorLeftHand = true;
                    }
                }

                else if (rightHand.Position.Y > head.Position.Y && leftHand.Position.Y < shoulderCenter.Position.Y)
                {
                    ellipseSmile.Fill = Brushes.Blue;
                    if (currentAction != onkol && activatorRightHand != 12) // wave hello with the right hand
                    {
                        currentAction = onkol;
                        serialP.WriteLine("w");
                        activatorRightHand = 12;
                    }
                }

                else
                {
                    ellipseSmile.Fill = Brushes.Black;
                    if (currentAction != blackIs) // stop, i.e. do nothing
                    {
                        currentAction = blackIs;
                        serialP.WriteLine("s");
                    }


                    if (currentAction == blackIs)
                    {
                        if (length >= check && currentFace != (int)faceConditions.FIRST)
                        {
                            serialP.WriteLine("z"); // smile
                            currentFace       = (int)faceConditions.FIRST;
                            ellipseSmile.Fill = Brushes.Brown;
                        }
                        else if (length < check && currentFace != (int)faceConditions.SECOND)
                        {
                            serialP.WriteLine("x"); // poker face
                            currentFace       = (int)faceConditions.SECOND;
                            ellipseSmile.Fill = Brushes.Gold;
                        }

                        #region "povoroti golovoi"
                        if (angle < angle1)
                        {
                            ellipseSmile.Fill = Brushes.Pink;
                            if (!headToRight) // turn the head to the right
                            {
                                headToRight  = true;
                                headToCenter = false;
                                headToLeft   = false;
                                serialP.WriteLine("k");
                            }
                        }
                        else if (angle > angle2) // turn the head to the left
                        {
                            if (!headToLeft)
                            {
                                headToLeft   = true;
                                headToCenter = false;
                                headToRight  = false;
                                serialP.WriteLine("j");
                            }
                        }
                        else if (angle < angle2 && angle > angle1) // head centered
                        {
                            if (!headToCenter)
                            {
                                headToCenter = true;
                                headToRight  = false;
                                headToLeft   = false;
                                serialP.WriteLine("p");
                            }
                        }
                        #endregion
                    }
                    else if (!faceFrame.TrackSuccessful && currentFace != (int)faceConditions.NONE)
                    {
                        serialP.WriteLine("c"); // sad face
                        currentFace       = (int)faceConditions.NONE;
                        ellipseSmile.Fill = Brushes.Chocolate;
                    }
                }
            }

            label2.Content = distance.ToString();
            //label1.Content = (leftHand.Position.Z * 100).ToString();
            //label3.Content = (shoulderCenter.Position.Z * 100).ToString();

            //serialP.Close();
        }
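
The smile check in Example #1 reduces to the projected distance between the two lip-corner feature points, compared against a threshold that shrinks as the person stands farther from the sensor. Below is a minimal standalone sketch of that idea; the threshold table is copied from the handler above, while the class and method names are illustrative and not part of the original code.

    using System;

    static class SmileHeuristic
    {
        // Distance ranges (cm) and matching lip-gap thresholds, copied from the handler above.
        private static readonly (double MinCm, double MaxCm, int LipGap)[] Thresholds =
        {
            (95, 110, 22), (110, 120, 19), (120, 130, 18), (130, 140, 17), (140, 150, 16),
            (150, 160, 14), (160, 170, 13), (170, 180, 12), (180, 190, 11)
        };

        // Returns true when the projected lip-corner distance exceeds the range-dependent
        // threshold. Outside the calibrated range it never reports a smile, mirroring the
        // original fallback where check stays at 100.
        public static bool IsSmiling(double leftX, double leftY, double rightX, double rightY, double distanceCm)
        {
            double dx = leftX - rightX;
            double dy = leftY - rightY;
            double lipGap = Math.Sqrt(dx * dx + dy * dy);

            foreach (var (minCm, maxCm, gap) in Thresholds)
            {
                if (distanceCm >= minCm && distanceCm < maxCm)
                {
                    return lipGap >= gap;
                }
            }

            return false;
        }
    }

In the handler itself, this comparison is the length >= check test that drives the "z" (smile) and "x" (poker face) serial commands.
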
Example #2
            /// <summary>
            /// Updates the face tracking information for this skeleton
            /// </summary>

            public void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                //No Touchy
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }
                if (faceTracker == null)
                {
                    faceTracker = new FaceTracker(kinectSensor);
                }
                frame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);
                this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                if (this.lastFaceTrackSucceeded)
                {
                    if (faceTriangles == null)
                    {
                        faceTriangles = frame.GetTriangles();
                    }
                    this.facePoints = frame.GetProjected3DShape();



                    //Touchy

                    //Assign Reference points
                    this.absfacePoints = frame.Get3DShape();
                    leftForehead       = this.absfacePoints[FeaturePoint.TopLeftForehead];
                    rightForehead      = this.absfacePoints[FeaturePoint.TopRightForehead];
                    jaw           = this.absfacePoints[FeaturePoint.BottomOfChin];
                    faceRotationX = frame.Rotation.X;
                    faceRotationY = frame.Rotation.Y;
                    faceRotationZ = frame.Rotation.Z;

                    //Calculate Reference Points
                    foreheadReferencePointX = ((rightForehead.X - leftForehead.X) / 2);
                    foreheadReferencePointY = ((rightForehead.Y - leftForehead.Y) / 2);
                    foreheadReferencePointZ = ((rightForehead.Z - leftForehead.Z) / 2);

                    //Set Animation Units
                    AUCoeff        = frame.GetAnimationUnitCoefficients();
                    jawLowererAU   = AUCoeff[AnimationUnit.JawLower];
                    lipStretcherAU = AUCoeff[AnimationUnit.LipStretcher];
                    browRaiserAU   = AUCoeff[AnimationUnit.BrowRaiser];
                    setJawData(jaw.Y, leftForehead.Y, rightForehead.Y, jawLowererAU, lipStretcherAU);

                    rotations = new float[5];
                    //set up matlab
                    matlab = new MLApp.MLApp();
                    matlab.Execute(@"cd C:\Users\Bala\Documents\MATLAB");
                    result = null;

                    //get rotation values and animation unit coefficients
                    rotations[0] = faceRotationX;
                    rotations[1] = faceRotationY;
                    rotations[2] = faceRotationZ;
                    rotations[3] = jawLowererAU;
                    rotations[4] = lipStretcherAU;
                    //Set up the GlovePIE OSC endpoints
                    OscPacket.LittleEndianByteOrder = false;
                    IPEndPoint myapp    = new IPEndPoint(IPAddress.Loopback, 1944);
                    IPEndPoint glovepie = new IPEndPoint(IPAddress.Loopback, 1945);
                    Console.WriteLine(browRaiserAU);

                    matlab.Feval("nnW", 1, out result, rotations[0]);
                    object[] resW = result as object[];
                    nnoutput = (int)((float)resW[0] + 0.5f);
                    if (nnoutput == 1)
                    {
                        commandtoSend = 1;
                    }
                    else
                    {
                        result = null;
                        matlab.Feval("nnA", 1, out result, rotations[1]);
                        object[] resA = result as object[];
                        nnoutput = (int)((float)resA[0] + 0.5f);
                        if (nnoutput == 1)
                        {
                            commandtoSend = 2;
                        }
                        else
                        {
                            result = null;
                            matlab.Feval("nnS", 1, out result, rotations[0]);
                            object[] resS = result as object[];
                            nnoutput = (int)((float)resS[0] + 0.5f);
                            if (nnoutput == 1)
                            {
                                commandtoSend = 3;
                            }
                            else
                            {
                                result = null;
                                matlab.Feval("nnd", 1, out result, rotations[1]);
                                object[] resD = result as object[];
                                nnoutput = (int)((float)resD[0] + 0.5f);
                                if (nnoutput == 1)
                                {
                                    commandtoSend = 4;
                                }
                                else
                                {
                                    result = null;
                                    matlab.Feval("nnLC", 1, out result, rotations[2]);
                                    object[] resLC = result as object[];
                                    nnoutput = (int)((float)resLC[0] + 0.5f);
                                    if (nnoutput == 1)
                                    {
                                        commandtoSend = 5;
                                    }
                                    else
                                    {
                                        result = null;
                                        matlab.Feval("nnRC", 1, out result, rotations[2]);
                                        object[] resRC = result as object[];
                                        nnoutput = (int)((float)resRC[0] + 0.5f);
                                        if (nnoutput == 1)
                                        {
                                            commandtoSend = 6;
                                        }
                                        else
                                        {
                                            result = null;
                                            if (jawLowererAU > 0.7)
                                            {
                                                commandtoSend = 7;
                                            }

                                            /*
                                             * matlab.Feval("nnSpace", 1, out result, rotations[3]);
                                             * object[] resSpace = result as object[];
                                             * nnoutput = (int)((float)resSpace[0] + 0.5f);
                                             * if (nnoutput == 1)
                                             * {
                                             *  commandtoSend = 7;
                                             * }*/
                                            else
                                            {
                                                result = null;
                                                if (browRaiserAU > 0.4)
                                                {
                                                    commandtoSend = 8;
                                                }
                                                else
                                                {
                                                    result        = null;
                                                    commandtoSend = 0;
                                                }

                                                /*result = null;
                                                 * matlab.Feval("nnMiddle", 1, out result, lipStretcherAU);
                                                 * object[] resMiddle = result as object[];
                                                 * nnoutput = (int)((float)resMiddle[0] + 0.5f);
                                                 * if (nnoutput == 1)
                                                 * {
                                                 *  commandtoSend = 8;
                                                 * }
                                                 * else
                                                 * {
                                                 *  result = null;
                                                 *  commandtoSend = 0;
                                                 * }*/
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    //Console.WriteLine("Iteration Complete");
                    switch (commandtoSend)
                    {
                    case 0:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 1:
                        msg = new OscMessage(myapp, "/move/w", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 2:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 3:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 4:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 5:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 6:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 7:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 8:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 10.0f);
                        msg.Send(glovepie);
                        break;
                    }
                }
            }
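
The switch at the end of Example #2 sends the same set of OSC addresses in every case, driving the selected axis to 10 and the rest to 0. Assuming the OscMessage constructor and Send overload used above, the fan-out could be written table-driven. This is only a sketch of a possible refactoring, intended to sit in the same class as the handler; it is not the original author's code, and unlike the original case 0 it also resets /move/space.

    // Axis addresses in command order: commandtoSend == 1 maps to "/move/w", 2 to "/move/a", and so on.
    private static readonly string[] OscAxes =
    {
        "/move/w", "/move/a", "/move/s", "/move/d",
        "/move/lc", "/move/rc", "/move/space", "/move/middle"
    };

    // Sends 10 on the axis selected by commandtoSend (1-based) and 0 on every other axis;
    // commandtoSend == 0 resets all axes.
    private void SendCommand(int commandtoSend, System.Net.IPEndPoint myapp, System.Net.IPEndPoint glovepie)
    {
        for (int i = 0; i < OscAxes.Length; i++)
        {
            float value = (commandtoSend == i + 1) ? 10.0f : 0.0f;
            OscMessage msg = new OscMessage(myapp, OscAxes[i], value);
            msg.Send(glovepie);
        }
    }
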
Example #3
        private void OnAllFramesReady(object sender, Microsoft.Kinect.AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information
                if (this.SkeletonData == null || this.SkeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.SkeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.SkeletonData);
                Skeleton activeSkeleton = null;
                activeSkeleton = (from skel in this.SkeletonData where skel.TrackingState == SkeletonTrackingState.Tracked select skel).FirstOrDefault();


                //Idea: separate the eye regions of the color image,
                //run a learning algorithm on the right and left eye,
                //and detect blinks on the separated parts of the color image.

                //colorImage is a one-dimensional array of 640 x 480 x 4 (RGBA) values


                if (activeSkeleton != null)
                {
                    FaceTrackFrame currentFaceFrame = faceTracker.Track(ColorImageFormat.RgbResolution640x480Fps30, colorImage, depthImageFormat, depthImage, activeSkeleton);
                    float          browRaiserValue  = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowRaiser];
                    float          browLowererValue = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowLower];
                    tbBrowLowerer.Text = browLowererValue.ToString();
                    tbBrowRaiser.Text  = browRaiserValue.ToString();
                    //Get relevant Points for blink detection
                    //Left eye
                    int    minX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].X);
                    int    minY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].Y);
                    int    maxX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].X);
                    int    maxY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].Y);
                    Bitmap leftEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, false);
                    pbLeftEye.Image = leftEye;

                    //Right eye
                    minX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].X);
                    minY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].Y);
                    maxX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].X);
                    maxY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].Y);

                    Bitmap rightEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, true);
                    pbRightEye.Image = rightEye;

                    //Apply an edge filter to both eyes.
                    double dxRight;
                    double dyRight;
                    double dxLeft;
                    double dyLeft;
                    if (rightEye != null && leftEye != null)
                    {
                        Bitmap edgePicRight = Convolution(ConvertGrey(rightEye), true, out dxRight, out dyRight);
                        Bitmap edgePicLeft  = Convolution(ConvertGrey(leftEye), false, out dxLeft, out dyLeft);



                        //If Face is rotated, move Mouse
                        if (headRotationHistory.Count > filterLength && currentFaceFrame.TrackSuccessful)
                        {
                            int x = 0;
                            int y = 0;

                            //Method 1: no smoothing
                            //ScaleXY(currentFaceFrame.Rotation, out x, out y);
                            //MouseControl.Move(x, y);

                            //Method 2: smoothing over the last x frames:
                            //int i = 0;
                            //Vector3DF rotationMedium = new Vector3DF();
                            //while (i < 10 && headRotationHistory.Count - 1 > i)
                            //{
                            //    i++;
                            //    rotationMedium.X += headRotationHistory[headRotationHistory.Count - 1 - i].X;
                            //    rotationMedium.Y += headRotationHistory[headRotationHistory.Count - 1 - i].Y;
                            //}
                            //rotationMedium.X = rotationMedium.X / i;
                            //rotationMedium.Y = rotationMedium.Y / i;
                            //ScaleXY(rotationMedium, out x, out y);
                            //MouseControl.Move(x, y);

                            //Method 3: Gaussian filter - weight the most recent frames more heavily.



                            Vector3DF rotationMedium = new Vector3DF();
                            rotationMedium.X = currentFaceFrame.Rotation.X * gaussFilter[0];
                            rotationMedium.Y = currentFaceFrame.Rotation.Y * gaussFilter[0];
                            int i = 0;
                            while (i < filterLength - 1)
                            {
                                i++;
                                rotationMedium.X += (headRotationHistory[headRotationHistory.Count - 1 - i].X * gaussFilter[i]);
                                rotationMedium.Y += (headRotationHistory[headRotationHistory.Count - 1 - i].Y * gaussFilter[i]);
                            }
                            rotationMedium.X = (float)(rotationMedium.X / gaussFactor);
                            rotationMedium.Y = (float)(rotationMedium.Y / gaussFactor);
                            ScaleXY(rotationMedium, out x, out y);

                            MouseControl.Move(x, y);
                            //Method 4: quadratic smoothing
                            //double deltaX = ((-currentFaceFrame.Rotation.Y) - (-headRotationHistory.Last().Y));
                            //double deltaY = ((-currentFaceFrame.Rotation.X) - (-headRotationHistory.Last().X));
                            //if (deltaX < 0)
                            //    deltaX = -Math.Pow(deltaX, 2) * 4;
                            //else
                            //    deltaX = Math.Pow(deltaX, 2) * 4;
                            //if (deltaY < 0)
                            //    deltaY = -Math.Pow(deltaY, 2) * 5;
                            //else
                            //    deltaY = Math.Pow(deltaY, 2) * 5;
                            //MouseControl.DeltaMove((int)Math.Round(deltaX, 0), (int)Math.Round(deltaY));
                        }

                        headRotationHistory.Add(currentFaceFrame.Rotation);
                        if (headRotationHistory.Count >= 100)
                        {
                            headRotationHistory.RemoveAt(0);
                        }
                    }
                }
            }
            catch (Exception)
            {
                // Swallow per-frame errors so one bad frame does not stop processing.
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
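
Method 3 in Example #3 smooths the head rotation with a Gaussian-weighted average over the last filterLength samples: the newest sample is multiplied by gaussFilter[0] and the accumulated sum is divided by gaussFactor. Those fields are declared outside the snippet, so the helper below is only a guess at how such weights could be produced; the sigma parameter and normalization-by-sum are assumptions, not taken from the original project.

    using System;
    using System.Linq;

    static class GaussianSmoothing
    {
        // Builds unnormalized Gaussian weights where index 0 (the newest sample) carries the
        // largest weight, and returns their sum for use as the normalization factor.
        public static (double[] Weights, double Sum) BuildFilter(int filterLength, double sigma)
        {
            double[] weights = Enumerable.Range(0, filterLength)
                .Select(i => Math.Exp(-(i * (double)i) / (2.0 * sigma * sigma)))
                .ToArray();

            return (weights, weights.Sum());
        }
    }

With (weights, sum) = BuildFilter(filterLength, sigma), the loop above corresponds to summing rotation[count - 1 - i] * weights[i] over the history and dividing by sum.
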
Example #4
        private void AllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for changes in any of the data this function is receiving
                // and reset things appropriately.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.DestroyFaceTracker();
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.DestroyFaceTracker();
                    this.colorImage               = null;
                    this.colorImageFormat         = colorImageFrame.Format;
                    this.colorImageWritableBitmap = null;
                    this.ColorImage.Source        = null;
                    this.theMaterial.Brush        = null;
                }

                if (this.skeletonData != null && this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = null;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.colorImageWritableBitmap == null)
                {
                    this.colorImageWritableBitmap = new WriteableBitmap(
                        colorImageFrame.Width, colorImageFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                    this.ColorImage.Source = this.colorImageWritableBitmap;
                    this.theMaterial.Brush = new ImageBrush(this.colorImageWritableBitmap)
                    {
                        ViewportUnits = BrushMappingMode.Absolute
                    };
                }

                if (this.skeletonData == null)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                // Copy data received in this event to our buffers.
                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);
                this.colorImageWritableBitmap.WritePixels(
                    new Int32Rect(0, 0, colorImageFrame.Width, colorImageFrame.Height),
                    this.colorImage,
                    colorImageFrame.Width * Bgr32BytesPerPixel,
                    0);

                // Find a skeleton to track.
                // First see if our old one is good.
                // When a skeleton is in PositionOnly tracking state, don't pick a new one
                // as it may become fully tracked again.
                Skeleton skeletonOfInterest =
                    this.skeletonData.FirstOrDefault(
                        skeleton =>
                        skeleton.TrackingId == this.trackingId &&
                        skeleton.TrackingState != SkeletonTrackingState.NotTracked);

                if (skeletonOfInterest == null)
                {
                    // Old one wasn't around.  Find any skeleton that is being tracked and use it.
                    skeletonOfInterest =
                        this.skeletonData.FirstOrDefault(
                            skeleton => skeleton.TrackingState == SkeletonTrackingState.Tracked);

                    if (skeletonOfInterest != null)
                    {
                        // This may be a different person so reset the tracker which
                        // could have tuned itself to the previous person.
                        if (this.faceTracker != null)
                        {
                            this.faceTracker.ResetTracking();
                        }

                        this.trackingId = skeletonOfInterest.TrackingId;
                    }
                }

                bool displayFaceMesh = false;

                if (skeletonOfInterest != null && skeletonOfInterest.TrackingState == SkeletonTrackingState.Tracked)
                {
                    if (this.faceTracker == null)
                    {
                        try
                        {
                            this.faceTracker = new FaceTracker(this.Kinect);
                        }
                        catch (InvalidOperationException)
                        {
                            // During some shutdown scenarios the FaceTracker
                            // is unable to be instantiated.  Catch that exception
                            // and don't track a face.
                            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                            this.faceTracker = null;
                        }
                    }

                    if (this.faceTracker != null)
                    {
                        FaceTrackFrame faceTrackFrame = this.faceTracker.Track(
                            this.colorImageFormat,
                            this.colorImage,
                            this.depthImageFormat,
                            this.depthImage,
                            skeletonOfInterest);

                        if (faceTrackFrame.TrackSuccessful)
                        {
                            this.UpdateMesh(faceTrackFrame);

                            // Only display the face mesh if there was a successful track.
                            displayFaceMesh = true;
                        }
                    }
                }
                else
                {
                    this.trackingId = -1;
                }

                this.viewport3d.Visibility = displayFaceMesh ? Visibility.Visible : Visibility.Hidden;
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
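
Example #4 writes the raw color buffer into the WriteableBitmap with a stride of colorImageFrame.Width * Bgr32BytesPerPixel. The constant itself is defined outside the snippet; a common way to define it, shown below as an assumption rather than the sample's actual declaration, is to derive it from the WPF Bgr32 pixel format instead of hard-coding 4.

    // Requires System.Windows.Media (WPF). Bytes per pixel for the Bgr32 format used by the
    // color stream and the WriteableBitmap; PixelFormats.Bgr32.BitsPerPixel is 32, so this is 4.
    private static readonly int Bgr32BytesPerPixel = (PixelFormats.Bgr32.BitsPerPixel + 7) / 8;
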
Example #5
            /// <summary>
            /// Updates the face tracking information for this skeleton
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);
                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            //    only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints3DRaw = frame.Get3DShape();
                        this.facePoints      = frame.GetProjected3DShape();
                        animationUnitsRaw    = frame.GetAnimationUnitCoefficients();
                    }
                    x              = frame.Rotation.X;
                    y              = frame.Rotation.Y;
                    z              = frame.Rotation.Z;
                    facePointS3D   = this.facePoints3DRaw;
                    animationUnits = animationUnitsRaw;
                    //Debug.WriteLine(animationUnits[AnimationUnit.JawLower]);
                    //Debug.WriteLine(animationUnits[AnimationUnit.BrowLower]);
                    //Debug.WriteLine(animationUnits[AnimationUnit.BrowRaiser]);
                    //Debug.WriteLine(animationUnits[AnimationUnit.JawLower]);
                    //Debug.WriteLine(animationUnits[AnimationUnit.LipCornerDepressor]);
                    //Debug.WriteLine(animationUnits[AnimationUnit.LipRaiser]);
                    //Debug.WriteLine(animationUnits[AnimationUnit.LipStretcher]);
                    //Debug.WriteLine(frame.Translation.ToString());
                    //Debug.WriteLine(frame.Rotation.ToString());
                    //this.facePoints[FeaturePoint.AboveChin].X+2;
                    //Debug.WriteLine(frame.Translation.X.ToString());
                    //Debug.WriteLine(frame.Translation.Y.ToString());
                    //Debug.WriteLine(frame.Translation.Z.ToString());
                }
            }
Example #6
            private bool CheckFace(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return(false);
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        //getting the Animation Unit Coefficients
                        this.AUs = frame.GetAnimationUnitCoefficients();
                        var jawLowerer   = AUs[AnimationUnit.JawLower];
                        var browLower    = AUs[AnimationUnit.BrowLower];
                        var browRaiser   = AUs[AnimationUnit.BrowRaiser];
                        var lipDepressor = AUs[AnimationUnit.LipCornerDepressor];
                        var lipRaiser    = AUs[AnimationUnit.LipRaiser];
                        var lipStretcher = AUs[AnimationUnit.LipStretcher];
                        //set up the output file (note: this recreates the file on every processed frame)
                        using (System.IO.StreamWriter file = new System.IO.StreamWriter
                                                                 (@"C:\Users\Public\data.txt"))
                        {
                            file.WriteLine("FaceTrack Data, started recording at " + DateTime.Now.ToString("HH:mm:ss tt"));
                        }

                        //Heuristic thresholds for classifying the facial expression

                        //BrowLower is unreliable when the user wears glasses; it works without them

                        string state = "";

                        //surprised
                        if ((jawLowerer < -0.25 || jawLowerer > 0.25) && browLower < 0)
                        {
                            state = "surprised";
                        }
                        //smiling
                        if (lipStretcher > 0.4 || lipDepressor < 0)
                        {
                            state = "smiling";
                        }
                        //sad
                        if (browRaiser < 0 && lipDepressor > 0)
                        {
                            state = "sad";
                        }
                        //angry
                        if ((browLower > 0 && (jawLowerer > 0.25 || jawLowerer < -0.25)) ||
                            (browLower > 0 && lipDepressor > 0))
                        {
                            state = "angry";
                        }
                        //System.Diagnostics.Debug.WriteLine(browLower);

                        this.facePoints = frame.GetProjected3DShape();

                        // Report success when the detected expression matches the expected state.
                        if (states[currentState] == state)
                        {
                            Trace.WriteLine("Yo!");
                            return true;
                        }
                    }
                }

                return false;
            }
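As a side note, the threshold chain above can be pulled into a small pure helper so it can be exercised without a live sensor. This is only a sketch: the name ClassifyExpression is made up here, the thresholds simply repeat the ones in the example, and it assumes the usual using Microsoft.Kinect.Toolkit.FaceTracking; plus the EnumIndexableCollection<AnimationUnit, float> that GetAnimationUnitCoefficients() returns.

            // Sketch only: mirrors the if-chain above; later matches overwrite earlier ones.
            private static string ClassifyExpression(EnumIndexableCollection<AnimationUnit, float> aus)
            {
                float jawLowerer   = aus[AnimationUnit.JawLower];
                float browLower    = aus[AnimationUnit.BrowLower];
                float browRaiser   = aus[AnimationUnit.BrowRaiser];
                float lipDepressor = aus[AnimationUnit.LipCornerDepressor];
                float lipStretcher = aus[AnimationUnit.LipStretcher];

                string state = "";
                if ((jawLowerer > 0.25f || jawLowerer < -0.25f) && browLower < 0)
                {
                    state = "surprised";
                }
                if (lipStretcher > 0.4f || lipDepressor < 0)
                {
                    state = "smiling";
                }
                if (browRaiser < 0 && lipDepressor > 0)
                {
                    state = "sad";
                }
                if ((browLower > 0 && (jawLowerer > 0.25f || jawLowerer < -0.25f)) ||
                    (browLower > 0 && lipDepressor > 0))
                {
                    state = "angry";
                }
                return state;
            }

With a helper like this, the block above could reduce to string state = ClassifyExpression(this.AUs);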
Example #7
            /// <summary>
            /// Updates the face tracking information for this skeleton
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // If the current skeleton is not tracked, it could be chosen for tracking here
                    // (the ChooseSkeletons call below is left disabled).
                    //kinectSensor.SkeletonStream.ChooseSkeletons(skeletonOfInterest.TrackingId);
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    // Hack to make face tracking detect the face even when the skeleton is not
                    // actually tracked (left disabled; not yet confirmed to work).
                    //skeletonOfInterest.TrackingState = SkeletonTrackingState.Tracked;

                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);
                    //new Microsoft.Kinect.Toolkit.FaceTracking.Rect(skeletonOfInterest.Position.));

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }
                        if (faceTag == null)
                        {
                            // Run face recognition once to obtain a name tag for this face.
                            faceTag = new FaceRecognizer().getFaceTag(this.colorImageBmp);

                            if (faceTag != null)
                            {
                                Global.StatusBarText.Text = "Found " + faceTag + "!";
                                if (Global.trackedPeople.ContainsKey(skeletonOfInterest))
                                {
                                    Global.trackedPeople[skeletonOfInterest] = faceTag;
                                }
                                else
                                {
                                    Global.trackedPeople.Add(skeletonOfInterest, faceTag);
                                }
                            }
                        }
                        this.facePoints = frame.GetProjected3DShape();
                        this.faceRect   = frame.FaceRect;
                    }
                }
            }
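The lazy construction of the FaceTracker, guarded against the InvalidOperationException that can be thrown during sensor shutdown, appears in both examples above. Below is a minimal sketch of that pattern as a helper; EnsureFaceTracker is an illustrative name, not part of the SDK.

            // Sketch: create the FaceTracker on first use and tolerate the shutdown-time
            // InvalidOperationException, exactly as the examples above do.
            private FaceTracker EnsureFaceTracker(KinectSensor kinectSensor)
            {
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // Leave it null and skip face tracking for this frame.
                        this.faceTracker = null;
                    }
                }
                return this.faceTracker;
            }

OnFrameReady could then start with if (this.EnsureFaceTracker(kinectSensor) == null) { return; } and go straight to the Track call.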
Example #8
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            File.AppendAllText("mouseLog.txt", DateTime.Now + " - All Kinect frames ready.\n");
            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Color- depth or Skeletonframe is null. Aborting Frame.\n");
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }


                // Get the skeleton information
                if (this.SkeletonData == null || this.SkeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.SkeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.SkeletonData);
                Skeleton activeSkeleton = null;
                activeSkeleton = (from skel in this.SkeletonData where skel.TrackingState == SkeletonTrackingState.Tracked select skel).FirstOrDefault();

                //Idea: separate the eye regions of the color image,
                //use a learning algorithm for the right and left eye,
                //and detect blinks on the separated parts of the color image

                //colorImage is a one-dimensional array of 640 x 480 pixels with 4 bytes per pixel (Bgr32)

                if (activeSkeleton != null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Skeleton is there. Trying to find face.\n");
                    FaceTrackFrame currentFaceFrame = faceTracker.Track(ColorImageFormat.RgbResolution640x480Fps30, colorImage, depthImageFormat, depthImage, activeSkeleton);
                    if (currentFaceFrame.TrackSuccessful)
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Recognized face successfully.\n");
                    }
                    else
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Couldn't find face in frame.\n");
                    }

                    //Get relevant Points for blink detection
                    //Left eye
                    int    minX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].X);
                    int    minY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].Y);
                    int    maxX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].X);
                    int    maxY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].Y);
                    Bitmap leftEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, false);
                    //this.pbRight.BeginInvoke((MethodInvoker)(() => this.pbRight.Image = leftEye));
                    //

                    //Right eye
                    minX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].X);
                    minY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].Y);
                    maxX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].X);
                    maxY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].Y);

                    Bitmap rightEye  = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, true);
                    Bitmap leftEye2  = null;
                    Bitmap rightEye2 = null;
                    if (leftEye != null)
                    {
                        leftEye2 = new Bitmap(leftEye);
                    }
                    if (rightEye != null)
                    {
                        rightEye2 = new Bitmap(rightEye);
                    }
                    // System.Delegate d = new MethodInvoker(SetPictures));
                    //   this.Invoke(SetPictures, leftEye);
                    //pbRight.Image = rightEye;
                    this.pbLeft.BeginInvoke((MethodInvoker)(() => this.pbLeft.Image = rightEye2));
                    this.pbRight.BeginInvoke((MethodInvoker)(() => this.pbRight.Image = leftEye2));
                    // this.Invoke(new MethodInvoker(SetPictures));
                    //Apply an edge filter (convolution) to both eyes.

                    if (rightEye != null && leftEye != null)
                    {
                        Dictionary <string, int> angleCount;
                        Bitmap edgePicRight   = Convolution(ConvertGrey(rightEye), true, out angleCount);
                        bool   rightEyeClosed = IsEyeClosed(angleCount);
                        Bitmap edgePicLeft    = Convolution(ConvertGrey(leftEye), false, out angleCount);
                        bool   leftEyeClosed  = IsEyeClosed(angleCount);
                        //   pbLeftFaltung.Image = edgePicLeft;
                        //   pbRightFaltung.Image = edgePicRight;



                        if (rightEyeClosedHistory.Count > 100)
                        {
                            rightEyeClosedHistory.RemoveAt(0);
                        }
                        if (leftEyeClosedHistory.Count > 100)
                        {
                            leftEyeClosedHistory.RemoveAt(0);
                        }
                        leftEyeClosedHistory.Add(leftEyeClosed);
                        rightEyeClosedHistory.Add(rightEyeClosed);

                        //If Face is rotated, move Mouse
                        if (headRotationHistory.Count > gaussFilter.Count - 1 && leftEyeClosedHistory.Count > nudConvolutionFilterLength.Value && currentFaceFrame.TrackSuccessful)
                        {
                            int   x = 0;
                            int   y = 0;
                            float browRaiserValue  = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowRaiser];
                            float browLowererValue = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowLower];
                            float mouthOpenValue   = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.JawLower];
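                            // Keep only the last 100 AU values; the mouth-open history is
                            // checked a few frames back below to confirm the click gesture.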
                            if (browRaiserHistory.Count >= 100)
                            {
                                browRaiserHistory.RemoveAt(0);
                                browLowererHistory.RemoveAt(0);
                                mouthOpenHistory.RemoveAt(0);
                            }
                            browLowererHistory.Add(browLowererValue);
                            browRaiserHistory.Add(browRaiserValue);
                            mouthOpenHistory.Add(mouthOpenValue);

                            //Method 1: no smoothing
                            //ScaleXY(currentFaceFrame.Rotation, out x, out y);
                            //MouseControl.Move(x, y);

                            ////Method 2: smoothing over the last x frames:
                            //int i = 0;
                            //Vector3DF rotationMedium = new Vector3DF();
                            //while (i < 10 && headRotationHistory.Count - 1 > i)
                            //{
                            //    i++;
                            //    rotationMedium.X += headRotationHistory[headRotationHistory.Count - 1 - i].X;
                            //    rotationMedium.Y += headRotationHistory[headRotationHistory.Count - 1 - i].Y;
                            //}
                            //rotationMedium.X = rotationMedium.X / i;
                            //rotationMedium.Y = rotationMedium.Y / i;
                            //ScaleXY(rotationMedium, out x, out y);
                            //MouseControl.Move(x, y);

                            //Method 3: Gaussian filter: weight the most recent frames more strongly.
                            Vector3DF rotationMedium = new Vector3DF();
                            rotationMedium.X = currentFaceFrame.Rotation.X * gaussFilter[0];
                            rotationMedium.Y = currentFaceFrame.Rotation.Y * gaussFilter[0];
                            int i = 0;
                            while (i < gaussFilter.Count - 1)
                            {
                                rotationMedium.X += (headRotationHistory[headRotationHistory.Count - 1 - i].X * gaussFilter[i]);
                                rotationMedium.Y += (headRotationHistory[headRotationHistory.Count - 1 - i].Y * gaussFilter[i]);
                                i++;
                            }
                            rotationMedium.X = (float)(rotationMedium.X / gaussFactor);
                            rotationMedium.Y = (float)(rotationMedium.Y / gaussFactor);
                            ScaleXY(rotationMedium, out x, out y);


                            //Method 4: quadratic smoothing
                            //double deltaX = ((-currentFaceFrame.Rotation.Y) - (-headRotationHistory.Last().Y));
                            //double deltaY = ((-currentFaceFrame.Rotation.X) - (-headRotationHistory.Last().X));
                            //if (deltaX < 0)
                            //    deltaX = -Math.Pow(deltaX, 2) * 4;
                            //else
                            //    deltaX = Math.Pow(deltaX, 2) * 4;
                            //if (deltaY < 0)
                            //    deltaY = -Math.Pow(deltaY, 2) * 5;
                            //else
                            //    deltaY = Math.Pow(deltaY, 2) * 5;
                            //MouseControl.DeltaMove((int)Math.Round(deltaX, 0), (int)Math.Round(deltaY));


                            //Check for right, left or double click
                            //1. Check whether a click already happened within the last nudClickDelay frames, or whether drag & drop (point-and-click) is active
                            if (clickDelay > nudClickDelay.Value && !pointNClickActive)
                            {
                                //2. If not, evaluate the mean convolution (dy) values of the last 16 frames
                                if (CalculateMeanConvolutionValues())
                                {
                                    clickDelay = 0;
                                }
                                else
                                {
                                    //Else check for open Mouth
                                    if (mouthOpenValue > (float)nudMouthOpenStartThreshold.Value && mouthOpenHistory[mouthOpenHistory.Count - 2] > (float)nudMouthOpenConfirmation.Value && mouthOpenHistory[mouthOpenHistory.Count - 3] > (float)nudMouthOpenConfirmation.Value && mouthOpenHistory[mouthOpenHistory.Count - 4] > (float)nudMouthOpenConfirmation.Value)
                                    {
                                        MouseControl.Move(mousePositionHistory[mousePositionHistory.Count - 4].X, mousePositionHistory[mousePositionHistory.Count - 4].Y);
                                        this.lbAction.Invoke((MethodInvoker)(() => this.lbAction.Items.Add("Left Mouse Down on X: " + mousePositionHistory[mousePositionHistory.Count - 4].X + " Y: " + mousePositionHistory[mousePositionHistory.Count - 4].Y)));
                                        //lbAction.Items.Add("Left Mouse Down on X: " + mousePositionHistory[mousePositionHistory.Count - 4].X + " Y: " + mousePositionHistory[mousePositionHistory.Count - 4].Y);
                                        MouseControl.MouseDownLeft();
                                        pointNClickActive = true;
                                        clickDelay        = 0;
                                    }
                                }
                            }
                            else if (pointNClickActive)
                            {
                                if (mouthOpenValue < (float)nudMouthOpenEndThreshold.Value)
                                {
                                    this.lbAction.Invoke((MethodInvoker)(() => this.lbAction.Items.Add("Left Mouse Up on X: " + x + " Y: " + y)));
                                    MouseControl.MouseUpLeft();
                                    pointNClickActive = false;
                                    clickDelay        = 0;
                                }
                            }
                            MouseControl.Move(x, y);
                            if (browLowererValue > (float)nudBrowLowererStartThreshold.Value)
                            {
                                MouseControl.ScrollDown((int)(-browLowererValue * (int)nudScrollMultiplierDown.Value));
                            }
                            if (browRaiserValue > (float)nudBrowRaiserStartThreshold.Value)
                            {
                                MouseControl.ScrollDown((int)(browRaiserValue * (int)nudScrollMultiplierUp.Value));
                            }
                            if (mousePositionHistory.Count > 100)
                            {
                                mousePositionHistory.RemoveAt(0);
                            }
                            mousePositionHistory.Add(new Microsoft.Kinect.Toolkit.FaceTracking.Point(x, y));
                            File.AppendAllText("mouseLog.txt", DateTime.Now + " - Face and eyes successfully tracked.\n");
                        }
                    }
                    else
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Face recognized but couldn't find eye in face.\n");
                    }
                    clickDelay++;

                    headRotationHistory.Add(currentFaceFrame.Rotation);
                    if (headRotationHistory.Count >= 100)
                    {
                        headRotationHistory.RemoveAt(0);
                    }
                }
                else
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Active Skeleton is null. Couldn't analyze frame.\n");
                }
            }
            catch (Exception e)
            {
                File.AppendAllText("mouseLog.txt", DateTime.Now + " - Error during frame analyzation.\n" + e.ToString());
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
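For reference, the Gaussian-weighted smoothing labelled "Method 3" above can be isolated into a small helper. This is only a sketch under the assumption that gaussFilter is a List<float> of kernel weights, gaussFactor is their sum, and headRotationHistory is a List<Vector3DF>; the name SmoothRotation is made up here.

        // Sketch: weight the current rotation and the most recent history entries with the
        // given kernel, then normalize, mirroring "Method 3" above (with an extra bound
        // check on the history length).
        // Assumes using System.Collections.Generic; and Microsoft.Kinect.Toolkit.FaceTracking;
        private static Vector3DF SmoothRotation(Vector3DF current, List<Vector3DF> history,
                                                List<float> weights, float weightSum)
        {
            Vector3DF smoothed = new Vector3DF();
            smoothed.X = current.X * weights[0];
            smoothed.Y = current.Y * weights[0];

            // Walk backwards through the history, most recent entry first.
            for (int i = 0; i < weights.Count - 1 && i < history.Count; i++)
            {
                smoothed.X += history[history.Count - 1 - i].X * weights[i];
                smoothed.Y += history[history.Count - 1 - i].Y * weights[i];
            }

            smoothed.X = smoothed.X / weightSum;
            smoothed.Y = smoothed.Y / weightSum;
            return smoothed;
        }

The mouse-move path above could then be written as ScaleXY(SmoothRotation(currentFaceFrame.Rotation, headRotationHistory, gaussFilter, (float)gaussFactor), out x, out y);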