Example #1
0
            // Updates the face tracking information for this skeleton.
            // Lazily constructs the FaceTracker on first use, runs one tracking
            // pass, and caches the projected face points when tracking succeeds.
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // Only a fully tracked skeleton carries usable face data.
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                // Create the tracker on demand; construction can fail during shutdown.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return; // construction failed; retry on a later frame
                }

                frame = this.faceTracker.Track(colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                if (this.lastFaceTrackSucceeded)
                {
                    this.facePoints = frame.GetProjected3DShape();
                }
            }
Example #2
0
        /// <summary>
        /// vvvv plugin evaluation: publishes tracking status, position, rotation,
        /// projected points and 3D points for every input frame slice.
        /// </summary>
        /// <param name="SpreadMax">Spread count supplied by the host (unused here).</param>
        public void Evaluate(int SpreadMax)
        {
            // The face index list is static; publish it exactly once.
            if (this.first)
            {
                this.FOutIndices.AssignFrom(KinectRuntime.FACE_INDICES);
                this.first = false;
            }

            if (!this.FInFrame.PluginIO.IsConnected)
            {
                // Input pin disconnected: empty every output spread.
                this.FOutPosition.SliceCount = 0;
                this.FOutPPTs.SliceCount = 0;
                this.FOutPts.SliceCount = 0;
                this.FOutRotation.SliceCount = 0;
                this.FOutOK.SliceCount = 0;
                return;
            }

            // Recompute only when the input spread actually changed.
            if (!this.FInFrame.IsChanged)
            {
                return;
            }

            int frameCount = this.FInFrame.SliceCount;
            this.FOutOK.SliceCount = frameCount;
            this.FOutPosition.SliceCount = frameCount;
            this.FOutRotation.SliceCount = frameCount;
            this.FOutPts.SliceCount = frameCount;
            this.FOutPPTs.SliceCount = frameCount;

            for (int slice = 0; slice < frameCount; slice++)
            {
                FaceTrackFrame current = this.FInFrame[slice];
                this.FOutOK[slice] = current.TrackSuccessful;
                this.FOutPosition[slice] = new Vector3(current.Translation.X, current.Translation.Y, current.Translation.Z);
                // Rotation scaled by INVTWOPI — presumably radians -> cycles; confirm against the constant's definition.
                this.FOutRotation[slice] = new Vector3(current.Rotation.X, current.Rotation.Y, current.Rotation.Z) * INVTWOPI;

                EnumIndexableCollection<FeaturePoint, PointF> projected = current.GetProjected3DShape();
                EnumIndexableCollection<FeaturePoint, Vector3DF> shape = current.Get3DShape();

                this.FOutPPTs[slice].SliceCount = projected.Count;
                this.FOutPts[slice].SliceCount = shape.Count;

                for (int j = 0; j < projected.Count; j++)
                {
                    this.FOutPPTs[slice][j] = new Vector2(projected[j].X, projected[j].Y);
                    this.FOutPts[slice][j] = new Vector3(shape[j].X, shape[j].Y, shape[j].Z);
                }
            }
        }
Example #3
0
            /// <summary>
            /// Updates the face tracking information for this skeleton and
            /// refreshes the on-screen feature/shape point overlays.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return; // untracked skeletons carry no usable face data
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // FaceTracker construction can fail during some shutdown
                        // scenarios; log it and keep running without face tracking.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame trackedFrame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = trackedFrame.TrackSuccessful;
                if (this.lastFaceTrackSucceeded)
                {
                    // The mesh topology never changes, so fetch it only once.
                    if (faceTriangles == null)
                    {
                        faceTriangles = trackedFrame.GetTriangles();
                    }

                    if (this.DrawFaceMesh || this.DrawFeaturePoints != DrawFeaturePoint.None)
                    {
                        this.facePoints = trackedFrame.GetProjected3DShape();
                    }

                    // GetShapePoints requires the toolkit addition described in !!!README.txt.
                    if (this.DrawShapePoints)
                    {
                        this.shapePoints = trackedFrame.GetShapePoints();
                    }
                }

                // Draw/remove the overlay components regardless of tracking success.
                SetFeaturePointsLocations();
                SetShapePointsLocations();
            }
Example #4
0
            /// <summary>
            /// Updates the face tracking information for this skeleton and
            /// captures the head orientation from the tracked frame.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor,
                                       ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat,
                                       short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return; // untracked skeletons have no face to track
                }

                // Lazily create the tracker; construction may fail during shutdown.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame trackedFrame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = trackedFrame.TrackSuccessful;
                if (this.lastFaceTrackSucceeded)
                {
                    // Triangle topology is fixed; retrieve it only once.
                    if (faceTriangles == null)
                    {
                        faceTriangles = trackedFrame.GetTriangles();
                    }

                    this.facePoints = trackedFrame.GetProjected3DShape();

                    // Capture the head pose (yaw, pitch, roll) for later use.
                    this.rotation = trackedFrame.Rotation;
                }
            }
Example #5
0
            /// <summary>
            /// Updates the face tracking information for this skeleton and
            /// forwards the projected coordinates of face point index 5 to the
            /// application object.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.  Log added for consistency with
                        // the other OnFrameReady implementations in this file.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints = frame.GetProjected3DShape();

                        // Index 5 of the projected shape — TODO confirm which
                        // facial landmark this index corresponds to.
                        double dbX = facePoints[5].X;
                        double dbY = facePoints[5].Y;

                        // BUG FIX: App.Current may be null or not an App (e.g. at
                        // design time or during shutdown); the previous unchecked
                        // use could throw a NullReferenceException.
                        App thisApp = App.Current as App;
                        if (thisApp != null)
                        {
                            thisApp.m_dbX = dbX;
                            thisApp.m_dbY = dbY;
                        }
                    }
                }
            }
            /// <summary>
            /// Updates the face tracking information for this skeleton and
            /// records the head rotation angles.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return; // nothing to do for an untracked skeleton
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // FaceTracker can fail to construct during shutdown;
                        // log and continue without tracking.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame trackedFrame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = trackedFrame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    return;
                }

                // The triangle list is constant, so it is fetched only once.
                if (faceTriangles == null)
                {
                    faceTriangles = trackedFrame.GetTriangles();
                }

                this.facePoints = trackedFrame.GetProjected3DShape();
                this.test       = trackedFrame.Get3DShape();

                // Rotation components as consumed here: X -> pitch, Z -> roll, Y -> yaw.
                pitchVal = trackedFrame.Rotation.X;
                rollVal  = trackedFrame.Rotation.Z;
                yawVal   = trackedFrame.Rotation.Y;
            }
Example #7
0
        /// <summary>
        /// Copies the tracked face geometry into the WPF3D mesh: one-time
        /// topology setup, then per-frame vertex and texture-coordinate updates.
        /// </summary>
        private void UpdateMesh(FaceTrackFrame faceTrackingFrame)
        {
            EnumIndexableCollection<FeaturePoint, Vector3DF> shape3D = faceTrackingFrame.Get3DShape();
            EnumIndexableCollection<FeaturePoint, PointF> shape2D = faceTrackingFrame.GetProjected3DShape();

            if (this.triangleIndices == null)
            {
                // One-time setup: topology and collection sizes never change.
                this.triangleIndices = faceTrackingFrame.GetTriangles();

                var indices = new Int32Collection(this.triangleIndices.Length * 3);
                foreach (FaceTriangle tri in this.triangleIndices)
                {
                    // Vertices are added in reverse order: Third, Second, First.
                    indices.Add(tri.Third);
                    indices.Add(tri.Second);
                    indices.Add(tri.First);
                }

                this.theGeometry.TriangleIndices = indices;
                this.theGeometry.Normals = null; // Let WPF3D calculate these.
                this.theGeometry.Positions = new Point3DCollection(shape3D.Count);
                this.theGeometry.TextureCoordinates = new PointCollection(shape2D.Count);

                // Pre-fill with placeholders so the loop below can index-assign.
                for (int i = 0; i < shape3D.Count; i++)
                {
                    this.theGeometry.Positions.Add(new Point3D());
                    this.theGeometry.TextureCoordinates.Add(new Point());
                }
            }

            double pixelWidth = this.colorImageWritableBitmap.PixelWidth;
            double pixelHeight = this.colorImageWritableBitmap.PixelHeight;

            // Push the current frame's vertices and texture coordinates into the mesh.
            for (int i = 0; i < shape3D.Count; i++)
            {
                Vector3DF vertex = shape3D[i];
                // The depth axis is negated when building the 3D position.
                this.theGeometry.Positions[i] = new Point3D(vertex.X, vertex.Y, -vertex.Z);

                PointF projected = shape2D[i];
                this.theGeometry.TextureCoordinates[i] =
                    new Point(projected.X / pixelWidth, projected.Y / pixelHeight);
            }
        }
Example #8
0
        /// <summary>
        /// Periodically runs face tracking on the latest sensor data until the
        /// token is cancelled.
        /// </summary>
        /// <param name="dueTime">Initial delay before the loop starts.</param>
        /// <param name="interval">Polling period; a zero-millisecond interval disables the loop entirely.</param>
        /// <param name="token">Token used to stop the loop.</param>
        private async Task FaceTrackingAsync(TimeSpan dueTime, TimeSpan interval, CancellationToken token)
        {
            if (interval.TotalMilliseconds == 0)
            {
                return;
            }

            // Initial wait time before we begin the periodic loop.
            if (dueTime > TimeSpan.Zero)
            {
                await Task.Delay(dueTime, token);
            }

            DateTime    LocalTimestamp = Timestamp;
            FaceTracker tracker        = new FaceTracker(Sensor);

            // BUG FIX: Task.Delay throws OperationCanceledException when the token
            // fires, which previously bypassed tracker.Dispose() and leaked the
            // tracker's resources.  try/finally guarantees disposal.
            try
            {
                // Repeat this loop until cancelled.
                while (!token.IsCancellationRequested)
                {
                    // Skip if we already processed the data for this timestamp.
                    if (Timestamp == LocalTimestamp)
                    {
                        await Task.Delay(interval, token);

                        continue;
                    }

                    // Timestamp data
                    LocalTimestamp = Timestamp;
                    FaceTrackWatch.Again();

                    // Do Job
                    try {
                        CopyColorData = true;
                        CopySkeletons = true;
                        FPoints       = null;
                        Mood          = 0;
                        if (null != GestureManager && null != GestureManager.Skeleton)
                        {
                            FaceTrackFrame frame = tracker.Track(ColorFormat, ColorData, DepthFormat, DepthData, GestureManager.Skeleton);
                            if (frame.TrackSuccessful)
                            {
                                // Only once.  It doesn't change.
                                if (FTriangles == null)
                                {
                                    FTriangles = frame.GetTriangles();
                                }
                                FPoints = frame.GetProjected3DShape();
                                // The LipCornerDepressor animation unit is used as the mood value.
                                Mood    = frame.GetAnimationUnitCoefficients()[AnimationUnit.LipCornerDepressor];
                                WSRProfileManager.GetInstance().UpdateMood(Mood);
                            }
                        }
                    }
                    catch (Exception ex) {
                        // Keep the loop alive: tracking errors are logged, not fatal.
                        WSRConfig.GetInstance().logError("FACE", ex);
                    }
                    FaceTrackWatch.Stop();

                    // Wait to repeat again.
                    if (interval > TimeSpan.Zero)
                    {
                        await Task.Delay(interval, token);
                    }
                }
            }
            finally
            {
                // Dispose Tracker even when a cancelled delay exits the loop early.
                tracker.Dispose();
            }
        }
Example #9
0
        /// <summary>
        /// Copies the tracked face geometry into the WPF3D mesh and, when the
        /// whole face projects inside the central screen region, publishes a
        /// FaceMesh snapshot.
        /// </summary>
        private void UpdateMesh(FaceTrackFrame faceTrackingFrame)
        {
            bool faceWithinBounds = true;

            EnumIndexableCollection<FeaturePoint, Vector3DF> shape3D = faceTrackingFrame.Get3DShape();
            EnumIndexableCollection<FeaturePoint, PointF> shape2D = faceTrackingFrame.GetProjected3DShape();

            if (this.triangleIndices == null)
            {
                // First frame only: topology and buffer sizes are constant.
                this.triangleIndices = faceTrackingFrame.GetTriangles();

                var indices = new Int32Collection(this.triangleIndices.Length * 3);
                foreach (FaceTriangle tri in this.triangleIndices)
                {
                    // Vertices are added in reverse order: Third, Second, First.
                    indices.Add(tri.Third);
                    indices.Add(tri.Second);
                    indices.Add(tri.First);
                }

                this.theGeometry.TriangleIndices = indices;
                this.theGeometry.Normals = null; // Let WPF3D calculate these.
                this.theGeometry.Positions = new Point3DCollection(shape3D.Count);
                this.theGeometry.TextureCoordinates = new PointCollection(shape2D.Count);

                // Pre-fill with placeholders so the loop below can index-assign.
                for (int i = 0; i < shape3D.Count; i++)
                {
                    this.theGeometry.Positions.Add(new Point3D());
                    this.theGeometry.TextureCoordinates.Add(new Point());
                }
            }

            double pixelWidth = this.colorImageWritableBitmap.PixelWidth;
            double pixelHeight = this.colorImageWritableBitmap.PixelHeight;

            // Update vertices/texture coordinates and check whether every point
            // stays inside the normalized bounds (x <= 0.6, y <= 0.75).
            for (int i = 0; i < shape3D.Count; i++)
            {
                Vector3DF vertex = shape3D[i];
                this.theGeometry.Positions[i] = new Point3D(vertex.X, vertex.Y, -vertex.Z);

                PointF projected = shape2D[i];
                double u = projected.X / pixelWidth;
                double v = projected.Y / pixelHeight;
                this.theGeometry.TextureCoordinates[i] = new Point(u, v);

                if (u > .6 || v > .75)
                {
                    faceWithinBounds = false;
                }
            }

            if (faceWithinBounds)
            {
                FaceMesh meshData = new FaceMesh();
                meshData.FaceViewport = viewport3d;
                FaceMeshData = meshData;
            }
        }
Example #10
0
        /// <summary>
        /// vvvv plugin evaluation: publishes tracking status, position, rotation,
        /// projected points, 3D points and smoothed per-vertex normals for every
        /// input frame slice.
        /// </summary>
        /// <param name="SpreadMax">Spread count supplied by the host (unused here).</param>
        public void Evaluate(int SpreadMax)
        {
            //Output static indices all the time
            if (this.first)
            {
                this.FOutIndices.AssignFrom(KinectRuntime.FACE_INDICES);
                this.first = false;
            }

            if (this.FInFrame.IsConnected)
            {
                // Only recompute when the input spread actually changed.
                if (this.FInFrame.IsChanged)
                {
                    this.FOutOK.SliceCount       = FInFrame.SliceCount;
                    this.FOutPosition.SliceCount = FInFrame.SliceCount;
                    this.FOutRotation.SliceCount = FInFrame.SliceCount;
                    this.FOutPts.SliceCount      = FInFrame.SliceCount;
                    this.FOutPPTs.SliceCount     = FInFrame.SliceCount;

                    for (int cnt = 0; cnt < this.FInFrame.SliceCount; cnt++)
                    {
                        FaceTrackFrame frame = this.FInFrame[cnt];
                        this.FOutOK[cnt]       = frame.TrackSuccessful;
                        this.FOutPosition[cnt] = new Vector3(frame.Translation.X, frame.Translation.Y, frame.Translation.Z);
                        // DegToCyc scales the rotation angles from degrees to cycles.
                        this.FOutRotation[cnt] = new Vector3(frame.Rotation.X, frame.Rotation.Y, frame.Rotation.Z) * (float)VMath.DegToCyc;

                        EnumIndexableCollection <FeaturePoint, PointF>    pp = frame.GetProjected3DShape();
                        EnumIndexableCollection <FeaturePoint, Vector3DF> p  = frame.Get3DShape();

                        this.FOutPPTs[cnt].SliceCount    = pp.Count;
                        this.FOutPts[cnt].SliceCount     = p.Count;
                        this.FOutNormals[cnt].SliceCount = p.Count;

                        //Compute smoothed normals: accumulate each triangle's face
                        //normal onto its three vertices, then normalize per vertex.
                        //FACE_INDICES stores triangles as flat triples of vertex indices.
                        Vector3[] norms    = new Vector3[p.Count];
                        int[]     inds     = KinectRuntime.FACE_INDICES;
                        int       tricount = inds.Length / 3;
                        for (int j = 0; j < tricount; j++)
                        {
                            int i1 = inds[j * 3];
                            int i2 = inds[j * 3 + 1];
                            int i3 = inds[j * 3 + 2];

                            Vector3 v1 = p[i1].SlimVector();
                            Vector3 v2 = p[i2].SlimVector();
                            Vector3 v3 = p[i3].SlimVector();

                            // The cross-product argument order (edgeB x edgeA) fixes the
                            // normal's orientation relative to the triangle winding —
                            // do not reorder.
                            Vector3 faceEdgeA = v2 - v1;
                            Vector3 faceEdgeB = v1 - v3;
                            Vector3 norm      = Vector3.Cross(faceEdgeB, faceEdgeA);

                            // Accumulating the unnormalized cross product implicitly
                            // weights each contribution by triangle area.
                            norms[i1] += norm;
                            norms[i2] += norm;
                            norms[i3] += norm;
                        }

                        for (int i = 0; i < pp.Count; i++)
                        {
                            this.FOutPPTs[cnt][i]    = new Vector2(pp[i].X, pp[i].Y);
                            this.FOutPts[cnt][i]     = new Vector3(p[i].X, p[i].Y, p[i].Z);
                            this.FOutNormals[cnt][i] = Vector3.Normalize(norms[i]);
                        }

                        /*FaceTriangle[] d = frame.GetTriangles();
                         * this.FOutIndices.SliceCount = d.Length * 3;
                         * for (int i = 0; i < d.Length; i++)
                         * {
                         *  this.FOutIndices[i * 3] = d[i].First;
                         *  this.FOutIndices[i * 3 + 1] = d[i].Second;
                         *  this.FOutIndices[i * 3 + 2] = d[i].Third;
                         * }*/
                    }
                }
            }
            else
            {
                // Input disconnected: clear all output spreads.
                this.FOutPosition.SliceCount = 0;
                this.FOutPPTs.SliceCount     = 0;
                this.FOutPts.SliceCount      = 0;
                this.FOutRotation.SliceCount = 0;
                this.FOutOK.SliceCount       = 0;
            }
        }
        /// <summary>
        /// Updates the face tracking information (no skeleton required by this
        /// Track overload) and runs whichever recognizers are currently enabled.
        /// </summary>
        internal void OnFrameReady(KinectSensor sensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage)
        {
            if (this.faceTracker == null)
            {
                try
                {
                    this.faceTracker = new FaceTracker(sensor);
                }
                catch (InvalidOperationException)
                {
                    // FaceTracker construction can fail during some shutdown
                    // scenarios; log it and retry on a later frame.
                    Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                    this.faceTracker = null;
                }
            }

            if (this.faceTracker == null)
            {
                return;
            }

            FaceTrackFrame trackedFrame = this.faceTracker.Track(colorImageFormat, colorImage, depthImageFormat, depthImage);

            this.lastFaceTrackSucceeded = trackedFrame.TrackSuccessful;
            if (this.lastFaceTrackSucceeded)
            {
                // The triangle topology is constant; fetch it only once.
                if (faceTriangles == null)
                {
                    faceTriangles = trackedFrame.GetTriangles();
                }

                this.facePoints   = trackedFrame.GetProjected3DShape();
                this.facePoints3D = trackedFrame.Get3DShape();
                //X-man modification
                this.AU = trackedFrame.GetAnimationUnitCoefficients();
            }

            // Notify listeners once both the topology and points are available.
            if (faceTriangles != null && facePoints != null)
            {
                faceUpdated(this, new FaceTrackerEventArgs(facePoints, faceTriangles));
            }

            // Each recognizer runs only when its detect flag is set.
            if (FaceRecognition.compare)
            {
                FaceRecognition.recognizer(this);
            }

            if (mouthOpened.detect)
            {
                mouthOpened.mouthRecognizer();
            }

            if (mouthShut.detect)
            {
                mouthShut.mouth2Recognizer();
            }

            if (lookingDirection.detect)
            {
                lookingDirection.lookRecognizer();
            }

            if (pupilRight.detect)
            {
                pupilRight.pupilRecognizer();
            }
        }
Example #12
0
        void kinectSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Retrieve each single frame and copy the data
            using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }
                colorImageFrame.CopyPixelDataTo(colorPixelData);
                //int strade = colorImageFrame.Width * 4;
                //image1.Source = BitmapSource.Create(colorImageFrame.Width, colorImageFrame.Height, 96, 96,
                //                                    PixelFormats.Bgr32, null, colorPixelData, strade);
            }

            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            {
                if (depthImageFrame == null)
                {
                    return;
                }
                depthImageFrame.CopyPixelDataTo(depthPixelData);
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }
                skeletonFrame.CopySkeletonDataTo(skeletonData);
            }

            // Retrieve the first tracked skeleton if any. Otherwise, do nothing.
            var skeleton = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);

            if (skeleton == null && !sessionClose)
            {
                serialP.WriteLine("s");
                serialP.WriteLine("c");
                serialP.WriteLine("p");
                serialP.WriteLine("g");
                if (isActive)
                {
                    isActive = false;
                }

                slejenie           = false;
                activatorRightHand = 0;
                activatorLeftHand  = false;
                firstMeet          = false;

                sessionClose = true;
                return;
            }
            else if (skeleton != null && !firstMeet)
            {
                serialP.WriteLine("i");
                playsound(comms[0]);
                firstMeet    = true;
                sessionClose = false;
            }
            if (sessionClose)
            {
                return;
            }
            // Make the faceTracker processing the data.
            FaceTrackFrame faceFrame = faceTracker.Track(kinectSensor.ColorStream.Format, colorPixelData,
                                                         kinectSensor.DepthStream.Format, depthPixelData,
                                                         skeleton);

            EnumIndexableCollection <FeaturePoint, PointF> facePoints = faceFrame.GetProjected3DShape();


            // points of hands and shoulder - to determine HELLO, etc.
            Joint shoulderCenter = skeleton.Joints[JointType.ShoulderCenter];
            Joint head           = skeleton.Joints[JointType.Head];
            Joint rightHand      = skeleton.Joints[JointType.HandRight];
            Joint leftHand       = skeleton.Joints[JointType.HandLeft];

            // initialize sound for hello
            //SoundPlayer a = new SoundPlayer("C:\\sal.wav");


            // open stream for uart reading
            //serialP.Open();

            // points of lip's corner - with help of this I determine smile
            double x1 = facePoints[88].X;
            double y1 = facePoints[88].Y;

            System.Windows.Point leftLip = new System.Windows.Point(x1, y1);
            double x2 = facePoints[89].X;
            double y2 = facePoints[89].Y;

            System.Windows.Point rightLip = new System.Windows.Point(x2, y2);
            Vector subtr = System.Windows.Point.Subtract(leftLip, rightLip);

            // distance between kinect and human
            distance = skeleton.Position.Z * 100;

            // distance between two corners of lip
            double length = Math.Sqrt(subtr.X * subtr.X + subtr.Y * subtr.Y);

            int check = 100;

            double angle1 = 0d;
            double angle2 = 0d;
            double angle  = skeleton.Position.X * 100;

            #region "Smile deterine"
            if (distance >= 95 && distance < 110)
            {
                check = 22;
            }
            else if (distance >= 110 && distance < 120)
            {
                check = 19;
            }
            else if (distance >= 120 && distance < 130)
            {
                check = 18;
            }
            else if (distance >= 130 && distance < 140)
            {
                check = 17;
            }
            else if (distance >= 140 && distance < 150)
            {
                check = 16;
            }
            else if (distance >= 150 && distance < 160)
            {
                check = 14;
            }
            else if (distance >= 160 && distance < 170)
            {
                check = 13;
            }
            else if (distance >= 170 && distance < 180)
            {
                check = 12;
            }
            else if (distance >= 180 && distance < 190)
            {
                check = 11;
            }

            #endregion

            #region "Angle"
            if (distance >= 90 && distance < 110)
            {
                angle1 = -15;
                angle2 = 15;
            }
            else if (distance >= 110 && distance < 150)
            {
                angle1 = -20;
                angle2 = 20;
            }
            else if (distance >= 150 && distance < 170)
            {
                angle1 = -30;
                angle2 = 30;
            }
            else if (distance >= 170 && distance < 200)
            {
                angle1 = -35;
                angle2 = 35;
            }
            else if (distance >= 200)
            {
                angle1 = -40;
                angle2 = 40;
            }
            #endregion

            double condition1 = Math.Abs(leftHand.Position.Z * 100 - shoulderCenter.Position.Z * 100);
            double condition2 = Math.Abs(rightHand.Position.Z * 100 - shoulderCenter.Position.Z * 100);

            // If position of two hands higher than shoulder it's activate 'slejenie za ob'ektom'
            if (condition1 > 45 &&
                condition2 > 45 &&
                leftHand.Position.X < rightHand.Position.X)
            {
                if (!slejenie)
                {
                    isActive       = true;
                    FIXED_DISTANCE = distance;
                    slejenie       = true;
                }
            }

            // The command to stop 'slejenie za ob'ektom'
            if (leftHand.Position.X > rightHand.Position.X)
            {
                isActive = false;
            }

            // Slejenie za ob'ektom
            if (isActive)
            {
                int pinkIs   = (int)typeCondition.THIRD;
                int purpleIs = (int)typeCondition.FORTH;
                int redIs    = (int)typeCondition.FIVTH;
                int yellowIs = (int)typeCondition.SIXTH;

                if (distance > FIXED_DISTANCE + 10.0d)
                {
                    if (angle < angle1)
                    {
                        ellipseSmile.Fill = Brushes.Pink;
                        if (currentAction != pinkIs)//povorot na pravo
                        {
                            currentAction = pinkIs;
                            serialP.WriteLine("r");
                        }
                    }
                    else if (angle > angle2)//povorot na levo
                    {
                        ellipseSmile.Fill = Brushes.Purple;
                        if (currentAction != purpleIs)
                        {
                            currentAction = purpleIs;
                            serialP.WriteLine("l");
                        }
                    }
                    else
                    {
                        ellipseSmile.Fill = Brushes.Red;
                        if (currentAction != redIs)// vpered
                        {
                            currentAction = redIs;
                            serialP.WriteLine("f");
                        }
                    }
                }
                else if (distance > 90)
                {
                    if (angle < angle1)
                    {
                        ellipseSmile.Fill = Brushes.Pink;
                        if (currentAction != pinkIs)//na pravo
                        {
                            currentAction = pinkIs;
                            serialP.WriteLine("r");
                        }
                    }
                    else if (angle > angle2)
                    {
                        ellipseSmile.Fill = Brushes.Purple;
                        if (currentAction != purpleIs)// na levo
                        {
                            currentAction = purpleIs;
                            serialP.WriteLine("l");
                        }
                    }
                    else
                    {
                        ellipseSmile.Fill = Brushes.Yellow;
                        if (currentAction != yellowIs)//stop, ili - do nothing
                        {
                            currentAction = yellowIs;
                            serialP.WriteLine("s");
                        }
                    }
                }
                else
                {
                    ellipseSmile.Fill = Brushes.Yellow;
                    if (currentAction != yellowIs)//stop, ili - do nothing
                    {
                        currentAction = yellowIs;
                        serialP.WriteLine("s");
                    }
                }
            }


            // esli 'slejenie za ob'ektom' otklu4en
            else if (!isActive)
            {
                int blueIs  = (int)typeCondition.FIRST;
                int blackIs = (int)typeCondition.SECOND;
                int onkol   = (int)typeCondition.SEVENTH;

                if (leftHand.Position.Y > head.Position.Y && rightHand.Position.Y < shoulderCenter.Position.Y)
                {
                    ellipseSmile.Fill = Brushes.Blue;
                    if (currentAction != blueIs && !activatorLeftHand)//privet levoi rukoi ----------------------------------------------------------------------------

                    {
                        currentAction = blueIs;
                        serialP.WriteLine("q");
                        activatorLeftHand = true;
                    }
                }

                else if (rightHand.Position.Y > head.Position.Y && leftHand.Position.Y < shoulderCenter.Position.Y)
                {
                    ellipseSmile.Fill = Brushes.Blue;
                    if (currentAction != onkol && activatorRightHand != 12)//privet pravoi rukoi   -----------------------------------------------------------------------------
                    {
                        currentAction = onkol;
                        serialP.WriteLine("w");
                        activatorRightHand = 12;
                    }
                }

                else
                {
                    ellipseSmile.Fill = Brushes.Black;
                    if (currentAction != blackIs)// toktaidy ili do nothing
                    {
                        currentAction = blackIs;
                        serialP.WriteLine("s");
                    }


                    if (currentAction == blackIs)
                    {
                        if (length >= check && currentFace != (int)faceConditions.FIRST)
                        {
                            serialP.WriteLine("z"); // smile
                            currentFace       = (int)faceConditions.FIRST;
                            ellipseSmile.Fill = Brushes.Brown;
                        }
                        else if (length < check && currentFace != (int)faceConditions.SECOND)
                        {
                            serialP.WriteLine("x"); // poker face
                            currentFace       = (int)faceConditions.SECOND;
                            ellipseSmile.Fill = Brushes.Gold;
                        }

                        #region "povoroti golovoi"
                        if (angle < angle1)
                        {
                            ellipseSmile.Fill = Brushes.Pink;
                            if (!headToRight)//povorot golovi na pravo
                            {
                                headToRight  = true;
                                headToCenter = false;
                                headToLeft   = false;
                                serialP.WriteLine("k");
                            }
                        }
                        else if (angle > angle2)//povorot golovi na levo
                        {
                            if (!headToLeft)
                            {
                                headToLeft   = true;
                                headToCenter = false;
                                headToRight  = false;
                                serialP.WriteLine("j");
                            }
                        }
                        else if (angle < angle2 && angle > angle1)//golova v centre
                        {
                            if (!headToCenter)
                            {
                                headToCenter = true;
                                headToRight  = false;
                                headToLeft   = false;
                                serialP.WriteLine("p");
                            }
                        }
                        #endregion
                    }
                    else if (!faceFrame.TrackSuccessful && currentFace != (int)faceConditions.NONE)
                    {
                        serialP.WriteLine("c"); // sad face
                        currentFace       = (int)faceConditions.NONE;
                        ellipseSmile.Fill = Brushes.Chocolate;
                    }
                }
            }

            label2.Content = distance.ToString();
            //label1.Content = (leftHand.Position.Z * 100).ToString();
            //label3.Content = (shoulderCenter.Position.Z * 100).ToString();

            //serialP.Close();
        }
Пример #13
0
            /// <summary>
            /// Runs face tracking against the current color/depth frame for the given
            /// skeleton and classifies the facial expression from the animation units.
            /// </summary>
            /// <param name="kinectSensor">Sensor used to lazily construct the <see cref="FaceTracker"/>.</param>
            /// <param name="colorImageFormat">Format of <paramref name="colorImage"/>.</param>
            /// <param name="colorImage">Raw color pixel data for this frame.</param>
            /// <param name="depthImageFormat">Format of <paramref name="depthImage"/>.</param>
            /// <param name="depthImage">Raw depth pixel data for this frame.</param>
            /// <param name="skeletonOfInterest">Skeleton whose face should be tracked.</param>
            /// <returns>
            /// true when the expression detected this frame matches the expected state
            /// (<c>states[currentState]</c>); false otherwise (including untracked skeletons
            /// and face-tracking failures).
            /// </returns>
            private bool CheckFace(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // Nothing to do with an untracked skeleton.
                    return false;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // Only need to get this once.  The mesh topology doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        // Animation Unit coefficients drive the expression classification below.
                        this.AUs = frame.GetAnimationUnitCoefficients();
                        var jawLowerer   = AUs[AnimationUnit.JawLower];
                        var browLower    = AUs[AnimationUnit.BrowLower];
                        var browRaiser   = AUs[AnimationUnit.BrowRaiser];
                        var lipDepressor = AUs[AnimationUnit.LipCornerDepressor];
                        var lipRaiser    = AUs[AnimationUnit.LipRaiser];
                        var lipStretcher = AUs[AnimationUnit.LipStretcher];

                        // NOTE(review): this recreates the file and overwrites it with only a
                        // header line on EVERY tracked frame, so no tracking data ever
                        // accumulates in it.  Confirm whether this should be opened once
                        // (or in append mode) by the caller instead.
                        using (System.IO.StreamWriter file = new System.IO.StreamWriter
                                                                 (@"C:\Users\Public\data.txt"))
                        {
                            file.WriteLine("FaceTrack Data, started recording at " + DateTime.Now.ToString("HH:mm:ss tt"));
                        }

                        // Here is the algorithm to test different facial features.
                        // BrowLower is messed up if you wear glasses, works if you don't wear 'em.
                        // Later checks intentionally override earlier ones (angry wins over sad, etc.).

                        string state = "";

                        // Surprised: jaw clearly open/closed beyond neutral with raised brows.
                        // BUG FIX: the original condition (jawLowerer < 0.25 || jawLowerer > 0.25)
                        // was true for every value except exactly 0.25; the "angry" branch below
                        // shows the intended test is |jawLowerer| > 0.25.
                        if ((jawLowerer > 0.25 || jawLowerer < -0.25) && browLower < 0)
                        {
                            state = "surprised";
                        }
                        // Smiling: stretched lips or raised lip corners.
                        if (lipStretcher > 0.4 || lipDepressor < 0)
                        {
                            state = "sad".Length == 0 ? state : state; // placeholder removed
                        }
                        //sad
                        if (browRaiser < 0 && lipDepressor > 0)
                        {
                            state = "sad";
                        }
                        //angry
                        if ((browLower > 0 && (jawLowerer > 0.25 || jawLowerer < -0.25)) ||
                            (browLower > 0 && lipDepressor > 0))
                        {
                            state = "angry";
                        }

                        this.facePoints = frame.GetProjected3DShape();

                        // Report a match only when the detected expression equals the
                        // currently expected one.
                        if (states[currentState] == state)
                        {
                            Trace.WriteLine("Yo!");
                            return true;
                        }
                    }
                }

                return false;
            }
Пример #14
0
            /// <summary>
            /// Per-frame callback: runs face tracking on the skeleton of interest,
            /// collects up to four samples of 3D face-point pairwise distances on
            /// background threads, then (once) bins them into 65-bucket histograms,
            /// writes them to a CSV test file and classifies the face by invoking an
            /// external Weka-based Java classifier.
            /// </summary>
            /// <param name="kinectSensor">Sensor used to lazily create the <see cref="FaceTracker"/>.</param>
            /// <param name="colorImageFormat">Format of <paramref name="colorImage"/>.</param>
            /// <param name="colorImage">Raw color pixel data for this frame.</param>
            /// <param name="depthImageFormat">Format of <paramref name="depthImage"/>.</param>
            /// <param name="depthImage">Raw depth pixel data for this frame.</param>
            /// <param name="skeletonOfInterest">Skeleton whose face should be tracked.</param>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                // Start the wall-clock timer on the very first frame only.
                if (!timerStarted)
                {
                    timer.Start();
                    timerStarted = true;
                }
                //increment our frames
                numberOfFrames++;


                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                // Lazily create the face tracker; creation can fail during shutdown.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints = frame.GetProjected3DShape();

                        /*if ()
                         * {
                         *  Debug.WriteLine("hit " + (frameIter * sampleRate) + " frames in " + (timer.Elapsed) + " seconds");
                         *  frameIter++;
                         * }*/

                        //Also grab our points
                        EnumIndexableCollection <FeaturePoint, Vector3DF> facePoints3D = frame.Get3DShape();
                        // NOTE(review): 'index' is incremented in the closure below but never
                        // read anywhere — looks like dead code left from debugging.
                        int index = 0;
                        if (numberOfFrames > frameIter * sampleRate && frameIter < 5) //only grab 4 samples over our given sample rate
                        {
                            //Create a new thread so we don't make the visual thread throw up all over the place
                            // NOTE(review): the background thread mutates frameIter while this
                            // (UI/event) thread reads it in the condition above — a data race.
                            // Confirm whether frames can slip in before the increment lands.
                            new Thread(() =>
                            {
                                Thread.CurrentThread.IsBackground = true;

                                // Snapshot the 3D feature points as plain tuples for distance math.
                                List <Tuple <float, float, float> > myPoints = new List <Tuple <float, float, float> >();
                                foreach (Vector3DF vector in facePoints3D)
                                {
                                    //csv.Append(string.Format("( ({1}, {2}, {3}){4}",vector.X, vector.Y, vector.Z, Environment.NewLine));
                                    myPoints.Add(new Tuple <float, float, float>(vector.X, vector.Y, vector.Z));
                                    index++;
                                }
                                calculateDistances(myPoints);
                                frameIter++;
                            }).Start();
                            //once = true;
                        }

                        // After the 4th sample lands, run the one-shot classification pipeline.
                        if (frameIter == 5)
                        {
                            SetStatusText("Generating histograms...");
                            Console.WriteLine("We are ready to sample");
                            // Bin each distance into one of 65 buckets (0..64), scaled by the
                            // sample's max distance so the histograms are size-invariant.
                            foreach (float distance in sampleOneDistances)
                            {
                                int sampleOneIndex = (int)Math.Floor(64 * distance / sampleOneMaxDistance);
                                sampleOneHistogram[sampleOneIndex]++;
                            }
                            foreach (float distance in sampleTwoDistances)
                            {
                                sampleTwoHistogram[(int)Math.Floor(64 * distance / sampleTwoMaxDistance)]++;
                            }
                            foreach (float distance in sampleThreeDistances)
                            {
                                sampleThreeHistogram[(int)Math.Floor(64 * distance / sampleThreeMaxDistance)]++;
                            }
                            foreach (float distance in sampleFourDistances)
                            {
                                sampleFourHistogram[(int)Math.Floor(64 * distance / sampleFourMaxDistance)]++;
                            }

                            //Go through histogram and divide by distances

                            // NOTE(review): these look like integer divisions (the foreach below
                            // declares 'int count'), so bin/count truncates to 0 for every bin
                            // smaller than the total count — the "normalization" likely zeroes
                            // most of the histogram.  Confirm the element type and intent.

                            //Get
                            for (int i = 0; i < sampleOneHistogram.Length; i++)
                            {
                                sampleOneHistogram[i] = sampleOneHistogram[i] / sampleOneDistances.Count;
                            }

                            for (int i = 0; i < sampleTwoHistogram.Length; i++)
                            {
                                sampleTwoHistogram[i] = sampleTwoHistogram[i] / sampleTwoDistances.Count;
                            }

                            for (int i = 0; i < sampleThreeHistogram.Length; i++)
                            {
                                sampleThreeHistogram[i] = sampleThreeHistogram[i] / sampleThreeDistances.Count;
                            }

                            for (int i = 0; i < sampleFourHistogram.Length; i++)
                            {
                                sampleFourHistogram[i] = sampleFourHistogram[i] / sampleFourDistances.Count;
                            }

                            int iter = 0;

                            foreach (int count in sampleTwoHistogram)//can iterate through any histogram, they're all of size 65
                            {
                                Console.WriteLine("Count for hist1/2/3/4[" + iter + "] is " + count + "/" + sampleOneHistogram[iter] + "/" + sampleThreeHistogram[iter] + "/" + sampleFourHistogram[iter]);
                                iter++;
                            }

                            //Write our histograms to a csv file
                            String[] sampleOneHistString = Array.ConvertAll(sampleOneHistogram, x => x.ToString());

                            // Row 1 is a 1..65 header; rows 2-5 are the four histograms.
                            using (System.IO.StreamWriter file = new System.IO.StreamWriter(testFilePath))
                            {
                                file.Write(string.Join(",", Enumerable.Range(1, 65).ToArray()) + Environment.NewLine);
                                file.Write(string.Join(",", sampleOneHistString));
                                file.Write(Environment.NewLine);
                                file.Write(string.Join(",", Array.ConvertAll(sampleTwoHistogram, x => x.ToString())));
                                file.Write(Environment.NewLine);
                                file.Write(string.Join(",", Array.ConvertAll(sampleThreeHistogram, x => x.ToString())));
                                file.Write(Environment.NewLine);
                                file.Write(string.Join(",", Array.ConvertAll(sampleFourHistogram, x => x.ToString())));
                            }
                            //pass that data file to jar
                            // NOTE(review): machine-specific hard-coded paths; this only works on
                            // the original dev box.  Consider configuration if this code survives.
                            String jarPath = "C:\\Users\\Datalab\\Documents\\GitHub\\WekaClassifier\\jar\\wekaClassifier.jar";
                            System.Diagnostics.Process clientProcess = new Process();
                            String jarargs = "C:\\Users\\Datalab\\Documents\\GitHub\\WekaClassifier\\data\\training_data.arff  C:\\Users\\Datalab\\Documents\\GitHub\\WekaClassifier\\data\\testFormat.dat";
                            clientProcess.StartInfo.FileName  = "java";
                            clientProcess.StartInfo.Arguments = "-jar " + jarPath + " " + jarargs;
                            clientProcess.StartInfo.RedirectStandardOutput = true;
                            clientProcess.StartInfo.UseShellExecute        = false;
                            clientProcess.Start();

                            // Blocks this thread until the classifier finishes and prints its guess.
                            String output = clientProcess.StandardOutput.ReadToEnd();
                            Console.WriteLine(output);
                            clientProcess.WaitForExit();
                            int code = clientProcess.ExitCode;

                            //write to dat file with 4 histograms averaged


                            frameIter++; //only do this once (will make conditional evaluate to false. Is this clean and clear? Not really? Do I care? Not particularly. At least it's documented.
                            ftNumPeople++;
                            SetPeopleText("People tracked : " + ftNumPeople);
                            SetStatusText("Status: waiting....");
                            SetPredictionText("Guess: " + output);
                        }
                    }
                }
            }
Пример #15
0
            /// <summary>
            /// Updates the face tracking information for this skeleton: runs the face
            /// tracker on the current color/depth frame and, on success, publishes the
            /// head rotation, 3D/projected shape points and animation units.
            /// </summary>
            /// <param name="kinectSensor">Sensor used to lazily create the <see cref="FaceTracker"/>.</param>
            /// <param name="colorImageFormat">Format of <paramref name="colorImage"/>.</param>
            /// <param name="colorImage">Raw color pixel data for this frame.</param>
            /// <param name="depthImageFormat">Format of <paramref name="depthImage"/>.</param>
            /// <param name="depthImage">Raw depth pixel data for this frame.</param>
            /// <param name="skeletonOfInterest">Skeleton whose face should be tracked.</param>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);
                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            // only need to get this once.  It doesn't change.
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints3DRaw = frame.Get3DShape();
                        this.facePoints      = frame.GetProjected3DShape();
                        animationUnitsRaw    = frame.GetAnimationUnitCoefficients();

                        // BUG FIX: the rotation and the published shape/AU fields were
                        // previously assigned OUTSIDE this success check, so a failed
                        // track frame would overwrite them with garbage/stale values.
                        // Only publish results obtained from a successful track.
                        x              = frame.Rotation.X;
                        y              = frame.Rotation.Y;
                        z              = frame.Rotation.Z;
                        facePointS3D   = this.facePoints3DRaw;
                        animationUnits = animationUnitsRaw;
                    }
                }
            }
Пример #16
0
            public void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame frame = this.faceTracker.Track(
                        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                    if (this.lastFaceTrackSucceeded)
                    {
                        if (faceTriangles == null)
                        {
                            faceTriangles = frame.GetTriangles();
                        }

                        this.facePoints = frame.GetProjected3DShape();

                        XmlDocument XmlDocKalibracja = new XmlDocument();
                        XmlDocKalibracja.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                        XmlNodeList elemListKalibracja = XmlDocKalibracja.GetElementsByTagName("options");
                        kalibracja = Convert.ToBoolean(Convert.ToInt32(elemListKalibracja[0].Attributes["kalibracja"].Value));

                        if (kalibracja == false)
                        {
                            kalibracjaCounter++;
                            if (kalibracjaCounter == 1)
                            {
                                Kalibracja okno = new Kalibracja();
                                okno.Show();
                            }

                            if (kalibracjaCounter > 150)
                            {
                                oldX = Convert.ToInt32(this.facePoints[23].X);
                                oldY = -Convert.ToInt32(this.facePoints[23].Y);

                                oldMouseX = System.Windows.Forms.Cursor.Position.X;
                                oldMouseY = System.Windows.Forms.Cursor.Position.Y;

                                aktualnyX  = oldX;
                                aktualnyY  = oldY;
                                kalibracja = true;

                                string newValue = "1";
                                kalibracjaCounter = 0;

                                XmlDocument xmlDoc = new XmlDocument();
                                xmlDoc.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                                XmlNode node = xmlDoc.SelectSingleNode("options");
                                node.Attributes[5].Value = Convert.ToString(newValue);
                                xmlDoc.Save(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                            }
                        }

                        if (kalibracja == true)
                        {
                            try
                            {
                                //ustawienie gestów
                                XmlDocument XmlDoc = new XmlDocument();
                                XmlDoc.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                                XmlNodeList elemList = XmlDoc.GetElementsByTagName("options");
                                gest[0] = Convert.ToInt32(elemList[0].Attributes["lpm"].Value);

                                //ustawienie gestów
                                XmlDocument XmlDoc2 = new XmlDocument();
                                XmlDoc2.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                                XmlNodeList elemList2 = XmlDoc2.GetElementsByTagName("options");
                                gest[1] = Convert.ToInt32(elemList2[0].Attributes["ppm"].Value);

                                //ustawienie gestów
                                XmlDocument XmlDoc3 = new XmlDocument();
                                XmlDoc3.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                                XmlNodeList elemList3 = XmlDoc3.GetElementsByTagName("options");
                                gest[2] = Convert.ToInt32(elemList3[0].Attributes["scrollup"].Value);

                                //ustawienie gestów
                                XmlDocument XmlDoc4 = new XmlDocument();
                                XmlDoc4.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                                XmlNodeList elemList4 = XmlDoc4.GetElementsByTagName("options");
                                gest[3] = Convert.ToInt32(elemList4[0].Attributes["scrolldown"].Value);
                            }
                            catch
                            {
                                MessageBox.Show("Błąd przy odczycie pliku settings.xml");
                            }



                            newX      = Convert.ToInt32(this.facePoints[23].X);
                            newY      = -Convert.ToInt32(this.facePoints[23].Y);
                            stosunekX = Math.Abs(newX / oldX);
                            stosunekY = Math.Abs(newY / oldY);

                            //odczyt czułości z pliku settings.xml
                            try
                            {
                                XmlDocument XmlDoc = new XmlDocument();
                                XmlDoc.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                                XmlNodeList elemList = XmlDoc.GetElementsByTagName("options");
                                sensitive = Convert.ToInt32(elemList[0].Attributes["sensitive"].Value);
                            }
                            catch
                            {
                                sensitive = 80;
                            }

                            if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) < 28 && Math.Abs(oldX - newX) > 70 && Math.Abs(oldY - newY) > 70 && Convert.ToInt32(Math.Abs(oldMouseX + (sensitive * stosunekX - sensitive))) < 1980 || Convert.ToInt32(Math.Abs(oldMouseY + (sensitive * stosunekY - sensitive))) < 1200 && Convert.ToInt32(Math.Abs(oldMouseX + (sensitive * stosunekX))) >= 0 && Convert.ToInt32(Math.Abs(oldMouseY + (sensitive * stosunekY))) >= 0)
                            {
                                if (stosunekX > 1.03 && ruchY == false)
                                {
                                    System.Windows.Forms.Cursor.Position = new System.Drawing.Point(Convert.ToInt32(Math.Abs(oldMouseX + (sensitive * stosunekX - sensitive))), Convert.ToInt32(Math.Abs(oldMouseY)));
                                    oldMouseX = Convert.ToInt32(Math.Abs(oldMouseX + (sensitive * stosunekX - sensitive)));
                                    ruchX     = true;
                                }
                                if (stosunekX < 0.97 && ruchY == false)
                                {
                                    System.Windows.Forms.Cursor.Position = new System.Drawing.Point(Convert.ToInt32(Math.Abs(oldMouseX - (sensitive - (sensitive * stosunekX)))), Convert.ToInt32(Math.Abs(oldMouseY)));
                                    oldMouseX = Convert.ToInt32(Math.Abs(oldMouseX - (sensitive - (sensitive * stosunekX))));
                                    ruchX     = true;
                                }
                                if (stosunekY > 1.03 && ruchX == false)
                                {
                                    System.Windows.Forms.Cursor.Position = new System.Drawing.Point(Convert.ToInt32(Math.Abs(oldMouseX)), Convert.ToInt32(Math.Abs(oldMouseY + (sensitive * stosunekY - sensitive))));
                                    oldMouseY = Convert.ToInt32(Math.Abs(oldMouseY + (sensitive * stosunekY - sensitive)));
                                    ruchY     = true;
                                }
                                if (stosunekY < 0.97 && ruchX == false)
                                {
                                    System.Windows.Forms.Cursor.Position = new System.Drawing.Point(Convert.ToInt32(Math.Abs(oldMouseX)), Convert.ToInt32(Math.Abs(oldMouseY - (sensitive - (sensitive * stosunekY)))));
                                    oldMouseY = Convert.ToInt32(Math.Abs(oldMouseY - (sensitive - (sensitive * stosunekY))));
                                    ruchY     = true;
                                }
                            }


                            //stan spoczynku
                            if (Math.Abs(oldX - newX) < 70)
                            {
                                ruchX = false;
                            }
                            //stan spoczynku
                            if (Math.Abs(oldY - newY) < 70)
                            {
                                ruchY = false;
                            }


                            //PIERWSZY GEST
                            if (gest[0] == 0)
                            {
                                if (this.facePoints[40].Y - this.facePoints[87].Y > 12)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[0] == 1)
                            {
                                if (this.facePoints[57].Y - this.facePoints[51].Y > 15)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[0] == 2)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY > 1.03)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }

                            if (gest[0] == 3)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY < 0.97)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            //DRUGI GEST
                            if (gest[1] == 0)
                            {
                                if (this.facePoints[40].Y - this.facePoints[87].Y > 12)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[1] == 1)
                            {
                                if (this.facePoints[57].Y - this.facePoints[51].Y > 14)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[1] == 2)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY > 1.03)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[1] == 3)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY < 0.97)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            //TRZECI GEST
                            if (gest[2] == 0)
                            {
                                if (this.facePoints[40].Y - this.facePoints[87].Y > 12)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[2] == 1)
                            {
                                if (this.facePoints[57].Y - this.facePoints[51].Y > 14)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[2] == 2)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY > 1.03)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[2] == 3)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY < 0.97)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            //CZWARTY GEST
                            if (gest[3] == 0)
                            {
                                if (this.facePoints[40].Y - this.facePoints[87].Y > 12)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[3] == 1)
                            {
                                if (this.facePoints[57].Y - this.facePoints[51].Y > 14)
                                {
                                    clickCounter++;
                                    if (clickCounter % 10 == 0)
                                    {
                                        DoMouseClick(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[3] == 2)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY > 1.03)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(1);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                            if (gest[3] == 3)
                            {
                                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY < 0.97)
                                {
                                    clickCounter++;
                                    if (clickCounter % 2 == 0)
                                    {
                                        DoMouseScroll(0);
                                    }
                                    if (clickCounter == 10000)
                                    {
                                        clickCounter = 0;
                                    }
                                }
                            }
                        }
                    }
                }
            }
Пример #17
0
        /// <summary>
        /// Allows the game to run logic such as updating the world,
        /// checking for collisions, gathering input, and playing audio.
        /// Pulls the latest Kinect color/depth/skeleton data, runs the face
        /// tracker on the nearest tracked skeleton, and updates the head pose
        /// (yaw/pitch/roll), screen-space position vector and overlay scale.
        /// </summary>
        /// <param name="gameTime">Provides a snapshot of timing values.</param>
        protected override void Update(GameTime gameTime)
        {
            // Allows the game to exit
            if (GamePad.GetState(PlayerIndex.One).Buttons.Back == ButtonState.Pressed)
            {
                this.Exit();
            }

            // Grab the most recent data published by the stream components.
            colorData       = colorStream.ColorData;
            depthData       = depthStream.DepthData;
            nearestSkeleton = skeletonStream.Skel;

            if (nearestSkeleton != null && nearestSkeleton.TrackingState == SkeletonTrackingState.Tracked)
            {
                // Lazily create the face tracker.  Construction can throw while
                // the sensor is still initializing, so on failure we leave it
                // null and retry on a later frame.
                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(this.chooser.Sensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // Sensor not ready yet -- try again on the next update.
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker != null)
                {
                    FaceTrackFrame faceTrackFrame = this.faceTracker.Track(
                        colorImageFormat,
                        colorData,
                        depthImageFormat,
                        depthData,
                        nearestSkeleton);

                    if (faceTrackFrame.TrackSuccessful)
                    {
                        EnumIndexableCollection <FeaturePoint, Vector3DF> shapePoints = faceTrackFrame.Get3DShape();

                        // Convert the SDK's degree-based rotation into radians;
                        // X/Y are negated to match this scene's camera convention.
                        yaw   = -MathHelper.ToRadians(faceTrackFrame.Rotation.Y);
                        pitch = -MathHelper.ToRadians(faceTrackFrame.Rotation.X);
                        roll  = MathHelper.ToRadians(faceTrackFrame.Rotation.Z);

                        // Perspective-project feature point 4 (a nose point) into
                        // screen space; 9.3 and 0.95 are empirically tuned factors.
                        vector.X = 9.3f * (shapePoints[4].X / shapePoints[4].Z);
                        vector.Y = 9.3f * (shapePoints[4].Y / shapePoints[4].Z) * 0.95f;
                        vector.Z = 0;
                        scale    = 0.4f;

                        Window.Title = shapePoints[4].X.ToString() + " " + shapePoints[4].Y.ToString() + " " + shapePoints[4].Z.ToString();
                    }
                    else
                    {
                        // Hide the overlay while the face is not tracked.
                        scale = 0;
                    }
                }
            }

            base.Update(gameTime);
        }
Пример #18
0
        /// <summary>
        /// Handles the Kinect AllFramesReady event: copies the color, depth and
        /// skeleton data into reusable buffers, runs the face tracker on the first
        /// tracked skeleton, extracts both eye regions for blink detection, and
        /// moves the mouse cursor based on Gaussian-smoothed head rotation.
        /// </summary>
        /// <param name="sender">The Kinect sensor that raised the event.</param>
        /// <param name="allFramesReadyEventArgs">Provides access to the color, depth and skeleton frames of this event.</param>
        private void OnAllFramesReady(object sender, Microsoft.Kinect.AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                // All three frames are required; if any is unavailable this event
                // is skipped entirely (frames are disposed in the finally block).
                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information
                if (this.SkeletonData == null || this.SkeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.SkeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.SkeletonData);
                // Pick the first fully tracked skeleton, if any.
                Skeleton activeSkeleton = null;
                activeSkeleton = (from skel in this.SkeletonData where skel.TrackingState == SkeletonTrackingState.Tracked select skel).FirstOrDefault();


                //Idea: Separate Eye-Parts of Color Image
                //Use learning Algorithm for right and left eye
                //Detect blink on separated parts of color Image

                //colorImage is one dimensional array with 640 x 480 x 4 (RGBA) values


                if (activeSkeleton != null)
                {
                    // NOTE(review): faceTracker is used without a null check here; if
                    // its construction failed elsewhere, the resulting exception is
                    // silently swallowed by the catch below -- confirm this is intended.
                    FaceTrackFrame currentFaceFrame = faceTracker.Track(ColorImageFormat.RgbResolution640x480Fps30, colorImage, depthImageFormat, depthImage, activeSkeleton);
                    float          browRaiserValue  = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowRaiser];
                    float          browLowererValue = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowLower];
                    tbBrowLowerer.Text = browLowererValue.ToString();
                    tbBrowRaiser.Text  = browRaiserValue.ToString();
                    //Get relevant Points for blink detection
                    //Left eye: bounding box from the eyelid feature points.
                    int    minX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].X);
                    int    minY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].Y);
                    int    maxX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].X);
                    int    maxY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].Y);
                    Bitmap leftEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, false);
                    pbLeftEye.Image = leftEye;

                    //Right eye: same bounding-box construction, mirrored feature points.
                    minX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].X);
                    minY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].Y);
                    maxX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].X);
                    maxY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].Y);

                    Bitmap rightEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, true);
                    pbRightEye.Image = rightEye;

                    // Apply an edge filter to both eyes.
                    double dxRight;
                    double dyRight;
                    double dxLeft;
                    double dyLeft;
                    if (rightEye != null && leftEye != null)
                    {
                        Bitmap edgePicRight = Convolution(ConvertGrey(rightEye), true, out dxRight, out dyRight);
                        Bitmap edgePicLeft  = Convolution(ConvertGrey(leftEye), false, out dxLeft, out dyLeft);



                        // If the face is rotated, move the mouse.  Waits until the
                        // rotation history holds at least filterLength samples.
                        if (headRotationHistory.Count > filterLength && currentFaceFrame.TrackSuccessful)
                        {
                            int x = 0;
                            int y = 0;

                            // Method 1: no smoothing
                            //ScaleXY(currentFaceFrame.Rotation, out x, out y);
                            //MouseControl.Move(x, y);

                            //// Method 2: smoothing over the last x frames:
                            //int i = 0;
                            //Vector3DF rotationMedium = new Vector3DF();
                            //while (i < 10 && headRotationHistory.Count - 1 > i)
                            //{
                            //    i++;
                            //    rotationMedium.X += headRotationHistory[headRotationHistory.Count - 1 - i].X;
                            //    rotationMedium.Y += headRotationHistory[headRotationHistory.Count - 1 - i].Y;
                            //}
                            //rotationMedium.X = rotationMedium.X / i;
                            //rotationMedium.Y = rotationMedium.Y / i;
                            //ScaleXY(rotationMedium, out x, out y);
                            //MouseControl.Move(x, y);

                            // Method 3 (active): Gaussian filter -- weight the most
                            // recent frames more heavily.



                            Vector3DF rotationMedium = new Vector3DF();
                            rotationMedium.X = currentFaceFrame.Rotation.X * gaussFilter[0];
                            rotationMedium.Y = currentFaceFrame.Rotation.Y * gaussFilter[0];
                            int i = 0;
                            while (i < filterLength - 1)
                            {
                                i++;
                                rotationMedium.X += (headRotationHistory[headRotationHistory.Count - 1 - i].X * gaussFilter[i]);
                                rotationMedium.Y += (headRotationHistory[headRotationHistory.Count - 1 - i].Y * gaussFilter[i]);
                            }
                            // Normalize by the sum of the Gaussian weights.
                            rotationMedium.X = (float)(rotationMedium.X / gaussFactor);
                            rotationMedium.Y = (float)(rotationMedium.Y / gaussFactor);
                            ScaleXY(rotationMedium, out x, out y);

                            MouseControl.Move(x, y);
                            // Method 4: quadratic smoothing
                            //double deltaX = ((-currentFaceFrame.Rotation.Y) - (-headRotationHistory.Last().Y));
                            //double deltaY = ((-currentFaceFrame.Rotation.X) - (-headRotationHistory.Last().X));
                            //if (deltaX < 0)
                            //    deltaX = -Math.Pow(deltaX, 2) * 4;
                            //else
                            //    deltaX = Math.Pow(deltaX, 2) * 4;
                            //if (deltaY < 0)
                            //    deltaY = -Math.Pow(deltaY, 2) * 5;
                            //else
                            //    deltaY = Math.Pow(deltaY, 2) * 5;
                            //MouseControl.DeltaMove((int)Math.Round(deltaX, 0), (int)Math.Round(deltaY));
                        }

                        // Record this frame's rotation, keeping the history bounded.
                        headRotationHistory.Add(currentFaceFrame.Rotation);
                        if (headRotationHistory.Count >= 100)
                        {
                            headRotationHistory.RemoveAt(0);
                        }
                    }
                }
            }
            catch (Exception e)
            {
                // NOTE(review): intentionally best-effort -- any per-frame failure is
                // swallowed so the event loop keeps running; consider at least
                // logging the exception here.
            }
            finally
            {
                // Frames must be disposed every event, or the sensor stops
                // delivering new ones.
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Пример #19
0
        /// <summary>
        /// Kinect AllFramesReady handler: caches the current color/depth/skeleton
        /// data, tracks the face of the first tracked skeleton, extracts both eye
        /// regions for blink detection and moves the mouse according to the head
        /// rotation.  All failures are logged to mouseLog.txt.
        /// </summary>
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            File.AppendAllText("mouseLog.txt", DateTime.Now + " - All Kinect frames ready.\n");
            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Color- depth or Skeletonframe is null. Aborting Frame.\n");
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                HandlePossibleImageFormatChanges(colorImageFrame, depthImageFrame);
                WriteDataToMembers(colorImageFrame, depthImageFrame, skeletonFrame);

                Skeleton activeSkeleton = (from skel in this.SkeletonData
                                           where skel.TrackingState == SkeletonTrackingState.Tracked
                                           select skel).FirstOrDefault();
                if (activeSkeleton == null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Active Skeleton is null. Couldn't analyze frame.\n");
                    return;
                }

                File.AppendAllText("mouseLog.txt", DateTime.Now + " - Skeleton is there. Trying to find face.\n");

                // NOTE(review): faceTracker is assumed to be initialized elsewhere;
                // if it is still null the resulting NullReferenceException is
                // swallowed by the catch below — confirm that is intended.
                FaceTrackFrame currentFaceFrame = faceTracker.Track(ColorImageFormat.RgbResolution640x480Fps30, colorImage, depthImageFormat, depthImage, activeSkeleton);
                LogFaceDetection(currentFaceFrame);

                // Fetch the projected shape once instead of once per feature point.
                var shape = currentFaceFrame.GetProjected3DShape();

                // Left eye bounding box.
                int minX = (int)Math.Round(shape[FeaturePoint.AboveOneFourthLeftEyelid].X);
                int minY = (int)Math.Round(shape[FeaturePoint.AboveOneFourthLeftEyelid].Y);
                int maxX = (int)Math.Round(shape[FeaturePoint.BelowThreeFourthLeftEyelid].X);
                int maxY = (int)Math.Round(shape[FeaturePoint.BelowThreeFourthLeftEyelid].Y);
                Bitmap leftEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, false);

                // Right eye bounding box.
                minX = (int)Math.Round(shape[FeaturePoint.AboveThreeFourthRightEyelid].X);
                minY = (int)Math.Round(shape[FeaturePoint.AboveThreeFourthRightEyelid].Y);
                maxX = (int)Math.Round(shape[FeaturePoint.OneFourthBottomRightEyelid].X);
                maxY = (int)Math.Round(shape[FeaturePoint.OneFourthBottomRightEyelid].Y);
                Bitmap rightEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, true);

                // The picture boxes get independent copies so the originals can be
                // processed (and released) without racing the UI thread.
                Bitmap leftEye2  = leftEye != null ? new Bitmap(leftEye) : null;
                Bitmap rightEye2 = rightEye != null ? new Bitmap(rightEye) : null;

                // BUG FIX: the second update targeted pbRight but was marshalled
                // through pbLeft.BeginInvoke; invoke each update on its own control.
                this.pbLeft.BeginInvoke((MethodInvoker)(() => this.pbLeft.Image = rightEye2));
                this.pbRight.BeginInvoke((MethodInvoker)(() => this.pbRight.Image = leftEye2));

                if (rightEye != null && leftEye != null)
                {
                    // Apply the edge filter to both eyes and derive the open/closed
                    // state from the resulting edge-angle histogram.
                    Dictionary<string, int> angleCount;
                    Bitmap edgePicRight   = Convolution(ConvertGrey(rightEye), true, out angleCount);
                    bool   rightEyeClosed = IsEyeClosed(angleCount);
                    Bitmap edgePicLeft    = Convolution(ConvertGrey(leftEye), false, out angleCount);
                    bool   leftEyeClosed  = IsEyeClosed(angleCount);

                    // Keep a bounded (~100 entry) history of blink states.
                    if (rightEyeClosedHistory.Count > 100)
                    {
                        rightEyeClosedHistory.RemoveAt(0);
                    }
                    if (leftEyeClosedHistory.Count > 100)
                    {
                        leftEyeClosedHistory.RemoveAt(0);
                    }
                    leftEyeClosedHistory.Add(leftEyeClosed);
                    rightEyeClosedHistory.Add(rightEyeClosed);

                    // If Face is rotated, move Mouse
                    MoveMouseAccordingToFaceRotation(currentFaceFrame);

                    // BUG FIX: the edge bitmaps were never released (GDI leak at
                    // ~30 fps).
                    edgePicRight.Dispose();
                    edgePicLeft.Dispose();
                }
                else
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Face recognized but couldn't find eye in face.\n");
                }

                // BUG FIX: the extracted eye bitmaps leaked; only their copies are
                // kept alive for the picture boxes.
                if (leftEye != null)
                {
                    leftEye.Dispose();
                }
                if (rightEye != null)
                {
                    rightEye.Dispose();
                }

                clickDelay++;

                headRotationHistory.Add(currentFaceFrame.Rotation);
                if (headRotationHistory.Count >= 100)
                {
                    headRotationHistory.RemoveAt(0);
                }
            }
            catch (Exception e)
            {
                File.AppendAllText("mouseLog.txt", DateTime.Now + " - Error during frame analyzation.\n" + e.ToString());
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
            /// <summary>
            /// Updates the face tracking information for this skeleton and maps the
            /// tracked head rotation onto <c>Globals.YAW</c>/<c>Globals.PITCH</c>
            /// as -1 / 0 / +1 direction flags (only while <c>Globals.cambio</c> is
            /// false).
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    // nothing to do with an untracked skeleton.
                    return;
                }

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // During some shutdown scenarios the FaceTracker
                        // is unable to be instantiated.  Catch that exception
                        // and don't track a face.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame frame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    return;
                }

                if (faceTriangles == null)
                {
                    // only need to get this once.  It doesn't change.
                    faceTriangles = frame.GetTriangles();
                }

                this.facePoints = frame.GetProjected3DShape();

                if (Globals.cambio == false)
                {
                    int pitch = (int)frame.Rotation.X;
                    int yaw   = (int)frame.Rotation.Y;

                    // BUG FIX: the original used four independent 'if's with strict
                    // comparisons, so boundary values (yaw == +/-20, pitch == 0 or
                    // pitch == -10) matched no branch and left the previous
                    // direction flag sticking.  The else-if chains below cover the
                    // whole range.
                    if (yaw > 20)          // turned left
                    {
                        Globals.YAW = 1;
                    }
                    else if (yaw < -20)    // turned right
                    {
                        Globals.YAW = -1;
                    }
                    else                   // centered
                    {
                        Globals.YAW = 0;
                    }

                    if (pitch > 0)         // looking up
                    {
                        Globals.PITCH = 1;
                    }
                    else if (pitch < -10)  // looking down
                    {
                        Globals.PITCH = -1;
                    }
                    else                   // neutral
                    {
                        Globals.PITCH = 0;
                    }
                }
            }
Пример #21
0
            /// <summary>
            /// Updates the face tracking information for this skeleton: lazily
            /// creates the FaceTracker, runs one tracking pass, and on success
            /// caches the triangles, projected shape, face rectangle and (once)
            /// the recognized face tag for this person.
            /// </summary>
            internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;

                // Deliberately no early-out for untracked skeletons; the experiment
                // of forcing tracking via SkeletonStream.ChooseSkeletons stays
                // disabled, and tracking is attempted regardless of state.

                if (this.faceTracker == null)
                {
                    try
                    {
                        this.faceTracker = new FaceTracker(kinectSensor);
                    }
                    catch (InvalidOperationException)
                    {
                        // FaceTracker construction can fail during some shutdown
                        // scenarios; simply skip face tracking in that case.
                        Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
                        this.faceTracker = null;
                    }
                }

                if (this.faceTracker == null)
                {
                    return;
                }

                FaceTrackFrame trackedFrame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

                this.lastFaceTrackSucceeded = trackedFrame.TrackSuccessful;
                if (!this.lastFaceTrackSucceeded)
                {
                    return;
                }

                if (faceTriangles == null)
                {
                    // The triangle topology never changes; fetch it only once.
                    faceTriangles = trackedFrame.GetTriangles();
                }

                if (faceTag == null)
                {
                    // Run face recognition once on the current color bitmap.
                    faceTag = new FaceRecognizer().getFaceTag(this.colorImageBmp);

                    if (faceTag != null)
                    {
                        Global.StatusBarText.Text = "Found " + faceTag + "!";
                        // Indexer assignment adds the entry or overwrites an
                        // existing one for this skeleton.
                        Global.trackedPeople[skeletonOfInterest] = faceTag;
                    }
                }

                this.facePoints = trackedFrame.GetProjected3DShape();
                this.faceRect   = trackedFrame.FaceRect;
            }
Пример #22
0
        /// <summary>
        /// Kinect AllFramesReady handler: caches the color/depth/skeleton data,
        /// then for each tracked skeleton maps the head joint into screen
        /// coordinates, tracks the face to locate the nose, and updates the
        /// gaze-line UI elements.
        /// BUG FIX: the original disposed all three frames in 'finally' and then
        /// kept using depthImageFrame afterwards (MapFromSkeletonPoint, Width,
        /// Height) — a use-after-dispose.  The skeleton/face processing now runs
        /// inside the 'try', before the frames are released.
        /// </summary>
        void nui_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = e.OpenColorImageFrame();
                depthImageFrame = e.OpenDepthImageFrame();
                skeletonFrame   = e.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Reset the cached buffers when a stream format changes.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                foreach (Skeleton skeleton in skeletonData)
                {
                    if (skeleton.TrackingState != SkeletonTrackingState.Tracked && skeleton.TrackingState != SkeletonTrackingState.PositionOnly)
                    {
                        continue;
                    }

                    // Head joint -> depth-image coordinates -> UI coordinates.
                    Joint joint = skeleton.Joints[JointType.Head];
                    DepthImagePoint depthPoint = depthImageFrame.MapFromSkeletonPoint(joint.Position);

                    System.Windows.Point point = new System.Windows.Point(
                        (int)(image1.ActualWidth * depthPoint.X / depthImageFrame.Width),
                        (int)(image1.ActualHeight * depthPoint.Y / depthImageFrame.Height));

                    textBlock1.Text = string.Format("X:{0:0.00} Y:{1:0.00} Z:{2:0.00}", point.X, point.Y, joint.Position.Z);

                    // Remember the previous head position and suppress small
                    // jitter (movements below 10 pixels keep the old value).
                    m_prevHeadX = m_headX;
                    m_prevHeadY = m_headY;
                    m_headX     = point.X;
                    m_headY     = point.Y;

                    if (Math.Abs(m_prevHeadX - point.X) < 10)
                    {
                        m_headX = m_prevHeadX;
                    }

                    if (Math.Abs(m_prevHeadY - point.Y) < 10)
                    {
                        m_headY = m_prevHeadY;
                    }

                    Canvas.SetLeft(ellipse1, point.X - ellipse1.Width / 2);
                    Canvas.SetTop(ellipse1, point.Y - ellipse1.Height / 2);

                    // Lazily create the face tracker.
                    if (this.faceTracker == null)
                    {
                        try
                        {
                            this.faceTracker = new FaceTracker(nui1);
                        }
                        catch (InvalidOperationException)
                        {
                            // During some shutdown scenarios the FaceTracker
                            // is unable to be instantiated.  Catch that exception
                            // and don't track a face.
                            this.faceTracker = null;
                        }
                    }

                    if (this.faceTracker != null)
                    {
                        FaceTrackFrame frame = this.faceTracker.Track(
                            colorImageFormat, colorImage, depthImageFormat, depthImage, skeleton);

                        if (frame.TrackSuccessful)
                        {
                            facePoints = frame.GetProjected3DShape();

                            // Feature point 107 is used as the nose position
                            // (see the "noseX/noseY" label below).
                            textBlock2.Text = string.Format("noseX:{0:0.00} noseY:{1:0.00} ", facePoints[107].X, facePoints[107].Y);

                            m_noseX = facePoints[107].X;
                            m_noseY = facePoints[107].Y;

                            Canvas.SetLeft(ellipse2, facePoints[107].X - ellipse2.Width / 2);
                            // NOTE(review): vertical offset uses Width, not Height —
                            // harmless only if ellipse2 is a circle; confirm.
                            Canvas.SetTop(ellipse2, facePoints[107].Y - ellipse2.Width / 2);
                        }
                    }

                    // The head->nose line approximates the viewing direction.
                    lineOfSight.X1 = m_headX;
                    lineOfSight.Y1 = m_headY;
                    lineOfSight.X2 = m_noseX;
                    lineOfSight.Y2 = m_noseY;

                    Canvas.SetLeft(m_sightRect, m_headX - m_sightRect.Width / 2);
                    Canvas.SetTop(m_sightRect, m_headY);

                    CheckWhichSight(depthImageFrame, m_noseX, m_noseY);
                }
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Пример #23
0
            /// <summary>
            /// Updates the face tracking information for this skeleton
            /// </summary>
            ///

            public void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
            {
                //No Touchy
                this.skeletonTrackingState = skeletonOfInterest.TrackingState;
                if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
                {
                    return;
                }
                if (faceTracker == null)
                {
                    faceTracker = new FaceTracker(kinectSensor);
                }
                frame = this.faceTracker.Track(
                    colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);
                this.lastFaceTrackSucceeded = frame.TrackSuccessful;
                if (this.lastFaceTrackSucceeded)
                {
                    if (faceTriangles == null)
                    {
                        faceTriangles = frame.GetTriangles();
                    }
                    this.facePoints = frame.GetProjected3DShape();



                    //Touchy

                    //Assign Reference points
                    this.absfacePoints = frame.Get3DShape();
                    leftForehead       = this.absfacePoints[FeaturePoint.TopLeftForehead];
                    rightForehead      = this.absfacePoints[FeaturePoint.TopRightForehead];
                    jaw           = this.absfacePoints[FeaturePoint.BottomOfChin];
                    faceRotationX = frame.Rotation.X;
                    faceRotationY = frame.Rotation.Y;
                    faceRotationZ = frame.Rotation.Z;

                    //Calculate Reference Points
                    foreheadReferencePointX = ((rightForehead.X - leftForehead.X) / 2);
                    foreheadReferencePointY = ((rightForehead.Y - leftForehead.Y) / 2);
                    foreheadReferencePointZ = ((rightForehead.Z - leftForehead.Z) / 2);

                    //Set Animation Units
                    AUCoeff        = frame.GetAnimationUnitCoefficients();
                    jawLowererAU   = AUCoeff[AnimationUnit.JawLower];
                    lipStretcherAU = AUCoeff[AnimationUnit.LipStretcher];
                    browRaiserAU   = AUCoeff[AnimationUnit.BrowRaiser];
                    setJawData(jaw.Y, leftForehead.Y, rightForehead.Y, jawLowererAU, lipStretcherAU);

                    rotations = new float[5];
                    //set up matlab
                    matlab = new MLApp.MLApp();
                    matlab.Execute(@"cd C:\Users\Bala\Documents\MATLAB");
                    result = null;

                    //get rotation values
                    rotations[0] = faceRotationX;
                    rotations[1] = faceRotationY;
                    rotations[2] = faceRotationZ;
                    rotations[3] = jawLowererAU;
                    rotations[4] = lipStretcherAU;
                    //Set up GlovePie
                    OscPacket.LittleEndianByteOrder = false;
                    IPEndPoint myapp    = new IPEndPoint(IPAddress.Loopback, 1944);
                    IPEndPoint glovepie = new IPEndPoint(IPAddress.Loopback, 1945);
                    Console.WriteLine(browRaiserAU);

                    matlab.Feval("nnW", 1, out result, rotations[0]);
                    object[] resW = result as object[];
                    nnoutput = (int)((float)resW[0] + 0.5f);
                    if (nnoutput == 1)
                    {
                        commandtoSend = 1;
                    }
                    else
                    {
                        result = null;
                        matlab.Feval("nnA", 1, out result, rotations[1]);
                        object[] resA = result as object[];
                        nnoutput = (int)((float)resA[0] + 0.5f);
                        if (nnoutput == 1)
                        {
                            commandtoSend = 2;
                        }
                        else
                        {
                            result = null;
                            matlab.Feval("nnS", 1, out result, rotations[0]);
                            object[] resS = result as object[];
                            nnoutput = (int)((float)resS[0] + 0.5f);
                            if (nnoutput == 1)
                            {
                                commandtoSend = 3;
                            }
                            else
                            {
                                result = null;
                                matlab.Feval("nnd", 1, out result, rotations[1]);
                                object[] resD = result as object[];
                                nnoutput = (int)((float)resD[0] + 0.5f);
                                if (nnoutput == 1)
                                {
                                    commandtoSend = 4;
                                }
                                else
                                {
                                    result = null;
                                    matlab.Feval("nnLC", 1, out result, rotations[2]);
                                    object[] resLC = result as object[];
                                    nnoutput = (int)((float)resLC[0] + 0.5f);
                                    if (nnoutput == 1)
                                    {
                                        commandtoSend = 5;
                                    }
                                    else
                                    {
                                        result = null;
                                        matlab.Feval("nnRC", 1, out result, rotations[2]);
                                        object[] resRC = result as object[];
                                        nnoutput = (int)((float)resRC[0] + 0.5f);
                                        if (nnoutput == 1)
                                        {
                                            commandtoSend = 6;
                                        }
                                        else
                                        {
                                            result = null;
                                            if (jawLowererAU > 0.7)
                                            {
                                                commandtoSend = 7;
                                            }

                                            /*
                                             * matlab.Feval("nnSpace", 1, out result, rotations[3]);
                                             * object[] resSpace = result as object[];
                                             * nnoutput = (int)((float)resSpace[0] + 0.5f);
                                             * if (nnoutput == 1)
                                             * {
                                             *  commandtoSend = 7;
                                             * }*/
                                            else
                                            {
                                                result = null;
                                                if (browRaiserAU > 0.4)
                                                {
                                                    commandtoSend = 8;
                                                }
                                                else
                                                {
                                                    result        = null;
                                                    commandtoSend = 0;
                                                }

                                                /*result = null;
                                                 * matlab.Feval("nnMiddle", 1, out result, lipStretcherAU);
                                                 * object[] resMiddle = result as object[];
                                                 * nnoutput = (int)((float)resMiddle[0] + 0.5f);
                                                 * if (nnoutput == 1)
                                                 * {
                                                 *  commandtoSend = 8;
                                                 * }
                                                 * else
                                                 * {
                                                 *  result = null;
                                                 *  commandtoSend = 0;
                                                 * }*/
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    //Console.WriteLine("Iteration Complete");
                    switch (commandtoSend)
                    {
                    case 0:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 1:
                        msg = new OscMessage(myapp, "/move/w", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 2:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 3:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 4:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 5:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 6:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 7:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 10.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 0.0f);
                        msg.Send(glovepie);
                        break;

                    case 8:
                        msg = new OscMessage(myapp, "/move/w", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/a", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/s", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/d", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/lc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/rc", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/space", 0.0f);
                        msg.Send(glovepie);
                        msg = new OscMessage(myapp, "/move/middle", 10.0f);
                        msg.Send(glovepie);
                        break;
                    }
                }
            }
Пример #24
0
        // Handles the Kinect AllFramesReady event: copies the color, depth and
        // skeleton data, maps each tracked skeleton's head joint into color-image
        // coordinates to position the head marker, runs the face tracker to place
        // the nose marker, then updates the attention angle.
        // Every second frame is skipped via the frameProccessed toggle to halve
        // the processing load.
        private void KinectSensorOnAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Toggle: process this frame, skip the next one.
            if (frameProccessed[1] == false)
            {
                frameProccessed[1] = true;
            }
            else
            {
                frameProccessed[1] = false;
                return;
            }

            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = e.OpenColorImageFrame();
                depthImageFrame = e.OpenDepthImageFrame();
                skeletonFrame   = e.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // The FaceTracker cannot cope with stream format changes, so drop
                // the cached buffers whenever the format differs from the last frame.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // (Re)allocate the copy buffers on first use or after a format change.
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.skeletonData);

                // BUG FIX: this processing previously ran in the finally block
                // *after* the frames had been disposed (and inside a redundant
                // second using over depthImageFrame), so depthImageFrame.Width
                // and .Height were read on a disposed object. It now runs while
                // the frames are still alive; finally only disposes.
                foreach (Skeleton sd in this.skeletonData)
                {
                    if (sd.TrackingState != SkeletonTrackingState.Tracked
                        && sd.TrackingState != SkeletonTrackingState.PositionOnly)
                    {
                        continue;
                    }

                    Joint joint = sd.Joints[JointType.Head];

                    // Map the head joint from skeleton space into depth space...
                    CoordinateMapper coordinateMapper = new CoordinateMapper(frontSensor);
                    DepthImagePoint depthPoint = coordinateMapper.MapSkeletonPointToDepthPoint(
                        joint.Position, DepthImageFormat.Resolution320x240Fps30);

                    // ...then scale the depth coordinates up to the color image size.
                    point = new System.Windows.Point((int)(frontSensor.ColorStream.FrameWidth * depthPoint.X
                                                           / depthImageFrame.Width),
                                                     (int)(frontSensor.ColorStream.FrameHeight * depthPoint.Y
                                                           / depthImageFrame.Height));

                    // Center the head marker on the mapped point.
                    Canvas.SetLeft(headEllipse, point.X - headEllipse.Width / 2);
                    Canvas.SetTop(headEllipse, point.Y - headEllipse.Height / 2);

                    // Lazily create the face tracker. During some shutdown
                    // scenarios the FaceTracker cannot be instantiated; catch
                    // that and simply skip face tracking.
                    if (this.faceTracker == null)
                    {
                        try
                        {
                            this.faceTracker = new FaceTracker(frontSensor);
                        }
                        catch (InvalidOperationException)
                        {
                            this.faceTracker = null;
                        }
                    }

                    if (this.faceTracker != null)
                    {
                        FaceTrackFrame frame = this.faceTracker.Track(
                            colorImageFormat, colorImage, depthImageFormat, depthImage, sd);

                        if (frame.TrackSuccessful)
                        {
                            faceTriangles   = frame.GetTriangles();
                            this.facePoints = frame.GetProjected3DShape();

                            // Shift each projected point by half a pixel to round
                            // toward the pixel center. Only P1 of each triangle is
                            // used downstream.
                            var faceModelPts = new List<Point>();
                            var faceModel    = new List<FaceModelTriangle>();

                            for (int i = 0; i < this.facePoints.Count; i++)
                            {
                                faceModelPts.Add(new Point(this.facePoints[i].X + 0.5f, this.facePoints[i].Y + 0.5f));
                            }

                            foreach (var t in faceTriangles)
                            {
                                var triangle = new FaceModelTriangle();
                                triangle.P1 = faceModelPts[t.First];
                                faceModel.Add(triangle);
                            }

                            // Index 108 is used as the nose position — presumably a
                            // nose-tip vertex of the face model; confirm against the
                            // FaceTracking model's point table.
                            Canvas.SetLeft(noseEllipse, faceModel[108].P1.X - noseEllipse.Width / 2);
                            Canvas.SetTop(noseEllipse, faceModel[108].P1.Y - noseEllipse.Height / 2);
                            nosePoint = new Point(faceModel[108].P1.X, faceModel[108].P1.Y);
                        }
                    }
                }

                // NOTE: previously this ran in finally even for skipped/null
                // frames (with a stale nosePoint); it now runs only after a
                // fully processed frame.
                getAttentionAngle(nosePoint);
            }
            finally
            {
                // Dispose the frames only after all processing is done.
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }
Пример #25
0
        // Per-frame handler for the Kinect AllFramesReady event that drives the
        // face-controlled mouse: copies color/depth/skeleton data, runs the face
        // tracker on the first fully tracked skeleton, crops both eye regions for
        // blink detection, smooths the head rotation with a Gaussian filter to
        // move the cursor, and maps mouth/brow animation units to click, drag and
        // scroll actions. Progress and failures are appended to mouseLog.txt.
        private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            File.AppendAllText("mouseLog.txt", DateTime.Now + " - All Kinect frames ready.\n");
            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                // All three streams are required; abort the frame if any is missing.
                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Color- depth or Skeletonframe is null. Aborting Frame.\n");
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }


                // Get the skeleton information
                if (this.SkeletonData == null || this.SkeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.SkeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                colorImageFrame.CopyPixelDataTo(this.colorImage);
                depthImageFrame.CopyPixelDataTo(this.depthImage);
                skeletonFrame.CopySkeletonDataTo(this.SkeletonData);
                // Pick the first fully tracked skeleton (if any) as the active user.
                Skeleton activeSkeleton = null;
                activeSkeleton = (from skel in this.SkeletonData where skel.TrackingState == SkeletonTrackingState.Tracked select skel).FirstOrDefault();

                //Idea: Separate Eye-Parts of Color Image
                //Use learning Algorithm for right and left eye
                //Detect blink on separated parts of color Image

                //colorImage is one dimensional array with 640 x 480 x 4 (RGBA) values

                if (activeSkeleton != null)
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Skeleton is there. Trying to find face.\n");
                    // NOTE(review): faceTracker is used here without a null check;
                    // if its construction failed elsewhere this throws and is
                    // swallowed by the generic catch below — confirm intended.
                    FaceTrackFrame currentFaceFrame = faceTracker.Track(ColorImageFormat.RgbResolution640x480Fps30, colorImage, depthImageFormat, depthImage, activeSkeleton);
                    if (currentFaceFrame.TrackSuccessful)
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Recognized face successfully.\n");
                    }
                    else
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Couldn't find face in frame.\n");
                    }

                    //Get relevant Points for blink detection
                    //Left eye
                    // Bounding box of the left eye from the projected eyelid
                    // feature points, used to crop the color image.
                    int    minX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].X);
                    int    minY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveOneFourthLeftEyelid].Y);
                    int    maxX    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].X);
                    int    maxY    = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.BelowThreeFourthLeftEyelid].Y);
                    // NOTE(review): these Bitmaps (and the edge images below) are
                    // never disposed — potential GDI handle leak at 30 fps.
                    Bitmap leftEye = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, false);
                    //this.pbRight.BeginInvoke((MethodInvoker)(() => this.pbRight.Image = leftEye));
                    //

                    //Right eye
                    minX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].X);
                    minY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.AboveThreeFourthRightEyelid].Y);
                    maxX = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].X);
                    maxY = (int)Math.Round(currentFaceFrame.GetProjected3DShape()[FeaturePoint.OneFourthBottomRightEyelid].Y);

                    Bitmap rightEye  = EyeExtract(colorImageFrame, currentFaceFrame, minX, minY, maxX, maxY, true);
                    // Copies for the UI thread so the originals can keep being
                    // processed while the picture boxes update asynchronously.
                    Bitmap leftEye2  = null;
                    Bitmap rightEye2 = null;
                    if (leftEye != null)
                    {
                        leftEye2 = new Bitmap(leftEye);
                    }
                    if (rightEye != null)
                    {
                        rightEye2 = new Bitmap(rightEye);
                    }
                    // System.Delegate d = new MethodInvoker(SetPictures));
                    //   this.Invoke(SetPictures, leftEye);
                    //pbRight.Image = rightEye;
                    this.pbLeft.BeginInvoke((MethodInvoker)(() => this.pbLeft.Image = rightEye2));
                    this.pbLeft.BeginInvoke((MethodInvoker)(() => this.pbRight.Image = leftEye2));
                    // this.Invoke(new MethodInvoker(SetPictures));
                    //Apply the edge filter to both eyes.

                    if (rightEye != null && leftEye != null)
                    {
                        // Edge-detect each eye; the resulting edge-angle histogram
                        // (angleCount) is what IsEyeClosed evaluates.
                        Dictionary <string, int> angleCount;
                        Bitmap edgePicRight   = Convolution(ConvertGrey(rightEye), true, out angleCount);
                        bool   rightEyeClosed = IsEyeClosed(angleCount);
                        Bitmap edgePicLeft    = Convolution(ConvertGrey(leftEye), false, out angleCount);
                        bool   leftEyeClosed  = IsEyeClosed(angleCount);
                        //   pbLeftFaltung.Image = edgePicLeft;
                        //   pbRightFaltung.Image = edgePicRight;



                        // Keep a bounded (100-entry) history of blink states.
                        if (rightEyeClosedHistory.Count > 100)
                        {
                            rightEyeClosedHistory.RemoveAt(0);
                        }
                        if (leftEyeClosedHistory.Count > 100)
                        {
                            leftEyeClosedHistory.RemoveAt(0);
                        }
                        leftEyeClosedHistory.Add(leftEyeClosed);
                        rightEyeClosedHistory.Add(rightEyeClosed);

                        //If Face is rotated, move Mouse
                        // Only act once enough history exists for the Gaussian
                        // filter and blink convolution, and the face is tracked.
                        if (headRotationHistory.Count > gaussFilter.Count - 1 && leftEyeClosedHistory.Count > nudConvolutionFilterLength.Value && currentFaceFrame.TrackSuccessful)
                        {
                            int   x = 0;
                            int   y = 0;
                            // Animation units: brow raise/lower drive scrolling,
                            // jaw lower (open mouth) drives click & drag.
                            float browRaiserValue  = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowRaiser];
                            float browLowererValue = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.BrowLower];
                            float mouthOpenValue   = currentFaceFrame.GetAnimationUnitCoefficients()[AnimationUnit.JawLower];
                            if (browRaiserHistory.Count >= 100)
                            {
                                browRaiserHistory.RemoveAt(0);
                                browLowererHistory.RemoveAt(0);
                                mouthOpenHistory.RemoveAt(0);
                            }
                            browLowererHistory.Add(browLowererValue);
                            browRaiserHistory.Add(browRaiserValue);
                            mouthOpenHistory.Add(mouthOpenValue);

                            //Method 1: no smoothing
                            //ScaleXY(currentFaceFrame.Rotation, out x, out y);
                            //MouseControl.Move(x, y);

                            ////Method 2: smoothing over the last x frames:
                            //int i = 0;
                            //Vector3DF rotationMedium = new Vector3DF();
                            //while (i < 10 && headRotationHistory.Count - 1 > i)
                            //{
                            //    i++;
                            //    rotationMedium.X += headRotationHistory[headRotationHistory.Count - 1 - i].X;
                            //    rotationMedium.Y += headRotationHistory[headRotationHistory.Count - 1 - i].Y;
                            //}
                            //rotationMedium.X = rotationMedium.X / i;
                            //rotationMedium.Y = rotationMedium.Y / i;
                            //ScaleXY(rotationMedium, out x, out y);
                            //MouseControl.Move(x, y);

                            //Method 3: Gaussian filter: weight the most recent frames more heavily.
                            Vector3DF rotationMedium = new Vector3DF();
                            rotationMedium.X = currentFaceFrame.Rotation.X * gaussFilter[0];
                            rotationMedium.Y = currentFaceFrame.Rotation.Y * gaussFilter[0];
                            int i = 0;
                            while (i < gaussFilter.Count - 1)
                            {
                                rotationMedium.X += (headRotationHistory[headRotationHistory.Count - 1 - i].X * gaussFilter[i]);
                                rotationMedium.Y += (headRotationHistory[headRotationHistory.Count - 1 - i].Y * gaussFilter[i]);
                                i++;
                            }
                            rotationMedium.X = (float)(rotationMedium.X / gaussFactor);
                            rotationMedium.Y = (float)(rotationMedium.Y / gaussFactor);
                            // Convert the smoothed head rotation into screen coordinates.
                            ScaleXY(rotationMedium, out x, out y);


                            //Method 4: quadratic smoothing
                            //double deltaX = ((-currentFaceFrame.Rotation.Y) - (-headRotationHistory.Last().Y));
                            //double deltaY = ((-currentFaceFrame.Rotation.X) - (-headRotationHistory.Last().X));
                            //if (deltaX < 0)
                            //    deltaX = -Math.Pow(deltaX, 2) * 4;
                            //else
                            //    deltaX = Math.Pow(deltaX, 2) * 4;
                            //if (deltaY < 0)
                            //    deltaY = -Math.Pow(deltaY, 2) * 5;
                            //else
                            //    deltaY = Math.Pow(deltaY, 2) * 5;
                            //MouseControl.DeltaMove((int)Math.Round(deltaX, 0), (int)Math.Round(deltaY));


                            //Check for right, left or Double Click
                            //1. Check if there was already a click 20 Frames ago, or if Drag & Drop is active
                            if (clickDelay > nudClickDelay.Value && !pointNClickActive)
                            {
                                //2. If not, calculate mean values of dy's last 16 Frames
                                if (CalculateMeanConvolutionValues())
                                {
                                    clickDelay = 0;
                                }
                                else
                                {
                                    //Else check for open Mouth
                                    // Mouth must be open now AND have been open in the
                                    // previous three frames to count as a deliberate
                                    // "mouse down" gesture (debouncing).
                                    if (mouthOpenValue > (float)nudMouthOpenStartThreshold.Value && mouthOpenHistory[mouthOpenHistory.Count - 2] > (float)nudMouthOpenConfirmation.Value && mouthOpenHistory[mouthOpenHistory.Count - 3] > (float)nudMouthOpenConfirmation.Value && mouthOpenHistory[mouthOpenHistory.Count - 4] > (float)nudMouthOpenConfirmation.Value)
                                    {
                                        // Press at the position from 4 frames ago, i.e.
                                        // where the cursor was when the gesture started.
                                        MouseControl.Move(mousePositionHistory[mousePositionHistory.Count - 4].X, mousePositionHistory[mousePositionHistory.Count - 4].Y);
                                        this.lbAction.Invoke((MethodInvoker)(() => this.lbAction.Items.Add("Left Mouse Down on X: " + mousePositionHistory[mousePositionHistory.Count - 4].X + " Y: " + mousePositionHistory[mousePositionHistory.Count - 4].Y)));
                                        //lbAction.Items.Add("Left Mouse Down on X: " + mousePositionHistory[mousePositionHistory.Count - 4].X + " Y: " + mousePositionHistory[mousePositionHistory.Count - 4].Y);
                                        MouseControl.MouseDownLeft();
                                        pointNClickActive = true;
                                        clickDelay        = 0;
                                    }
                                }
                            }
                            else if (pointNClickActive)
                            {
                                // Drag & drop ends when the mouth closes again.
                                if (mouthOpenValue < (float)nudMouthOpenEndThreshold.Value)
                                {
                                    this.lbAction.Invoke((MethodInvoker)(() => this.lbAction.Items.Add("Left Mouse Up on X: " + x + " Y: " + y)));
                                    MouseControl.MouseUpLeft();
                                    pointNClickActive = false;
                                    clickDelay        = 0;
                                }
                            }
                            MouseControl.Move(x, y);
                            // Brow lower scrolls down, brow raise scrolls up
                            // (both use ScrollDown with opposite signs).
                            if (browLowererValue > (float)nudBrowLowererStartThreshold.Value)
                            {
                                MouseControl.ScrollDown((int)(-browLowererValue * (int)nudScrollMultiplierDown.Value));
                            }
                            if (browRaiserValue > (float)nudBrowRaiserStartThreshold.Value)
                            {
                                MouseControl.ScrollDown((int)(browRaiserValue * (int)nudScrollMultiplierUp.Value));
                            }
                            if (mousePositionHistory.Count > 100)
                            {
                                mousePositionHistory.RemoveAt(0);
                            }
                            mousePositionHistory.Add(new Microsoft.Kinect.Toolkit.FaceTracking.Point(x, y));
                            File.AppendAllText("mouseLog.txt", DateTime.Now + " - Face and eyes successfully tracked.\n");
                        }
                    }
                    else
                    {
                        File.AppendAllText("mouseLog.txt", DateTime.Now + " - Face recognized but couldn't find eye in face.\n");
                    }
                    clickDelay++;

                    // Record head rotation for the smoothing filters (bounded history).
                    headRotationHistory.Add(currentFaceFrame.Rotation);
                    if (headRotationHistory.Count >= 100)
                    {
                        headRotationHistory.RemoveAt(0);
                    }
                }
                else
                {
                    File.AppendAllText("mouseLog.txt", DateTime.Now + " - Active Skeleton is null. Couldn't analyze frame.\n");
                }
            }
            catch (Exception e)
            {
                // Deliberate best-effort: a failed frame is logged and dropped so
                // the event pump keeps running.
                File.AppendAllText("mouseLog.txt", DateTime.Now + " - Error during frame analyzation.\n" + e.ToString());
            }
            finally
            {
                if (colorImageFrame != null)
                {
                    colorImageFrame.Dispose();
                }

                if (depthImageFrame != null)
                {
                    depthImageFrame.Dispose();
                }

                if (skeletonFrame != null)
                {
                    skeletonFrame.Dispose();
                }
            }
        }